From 1b5e64a4d91cf47d10584440f16aac7fa92f7318 Mon Sep 17 00:00:00 2001 From: Suraj Singh Date: Tue, 25 Jul 2023 21:50:53 -0700 Subject: [PATCH 01/75] Skip testPitCreatedOnReplica IT with remote store (#8877) * Skip testPitCreatedOnReplica IT with remote store Signed-off-by: Suraj Singh * Address review comment Signed-off-by: Suraj Singh --------- Signed-off-by: Suraj Singh --- .../opensearch/indices/replication/SegmentReplicationIT.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java index 2a7e8e58b2d03..3ab1a2a8564c5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java @@ -1177,6 +1177,10 @@ public void testScrollWithOngoingSegmentReplication() throws Exception { } public void testPitCreatedOnReplica() throws Exception { + assumeFalse( + "Skipping the test as it is flaky with remote store. Tracking issue https://github.com/opensearch-project/OpenSearch/issues/8850", + segmentReplicationWithRemoteEnabled() + ); final String primary = internalCluster().startDataOnlyNode(); createIndex(INDEX_NAME); ensureYellowAndNoInitializingShards(INDEX_NAME); From e1a41255de203bc57c1e29469a9bb5d50fe0d84b Mon Sep 17 00:00:00 2001 From: Marc Handalian Date: Wed, 26 Jul 2023 01:19:10 -0700 Subject: [PATCH 02/75] Fix testReplicaHasDiffFilesThanPrimary. (#8863) This test is failing in two ways. First it fails when copying segments from the remote store and there is a cksum mismatch. In this case it is not guaranteed the directory implementation will replace the existing file when copying from the store. This change ensures the mismatched file is cleaned up but only if the shard is not serving reads. In that case we fail the shard so it is re-recovered rather than deleting the segment underneath it. This test also fails with a divide by 0 in RemoteStoreRefreshListener. Signed-off-by: Marc Handalian --- .../main/java/org/opensearch/index/shard/IndexShard.java | 8 ++++++++ .../index/shard/RemoteStoreRefreshListener.java | 5 ++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 0d8a54147d65e..8b6d083379fe1 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -4808,6 +4808,14 @@ private boolean localDirectoryContains(Directory localDirectory, String file, lo return true; } else { logger.warn("Checksum mismatch between local and remote segment file: {}, will override local file", file); + // If there is a checksum mismatch and we are not serving reads it is safe to go ahead and delete the file now. + // Outside of engine resets this method will be invoked during recovery so this is safe. + if (isReadAllowed() == false) { + localDirectory.deleteFile(file); + } else { + // segment conflict with remote store while the shard is serving reads. 
+ failShard("Local copy of segment " + file + " has a different checksum than the version in remote store", null); + } } } catch (NoSuchFileException | FileNotFoundException e) { logger.debug("File {} does not exist in local FS, downloading from remote store", file); diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java index 3ea8278038ac5..2385b906a7ae5 100644 --- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java +++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java @@ -457,11 +457,10 @@ private void updateLocalSizeMapAndTracker(Collection segmentFiles) { private void updateFinalStatusInSegmentTracker(boolean uploadStatus, long bytesBeforeUpload, long startTimeInNS) { if (uploadStatus) { long bytesUploaded = segmentTracker.getUploadBytesSucceeded() - bytesBeforeUpload; - long timeTakenInMS = (System.nanoTime() - startTimeInNS) / 1_000_000L; - + long timeTakenInMS = TimeValue.nsecToMSec(System.nanoTime() - startTimeInNS); segmentTracker.incrementTotalUploadsSucceeded(); segmentTracker.addUploadBytes(bytesUploaded); - segmentTracker.addUploadBytesPerSec((bytesUploaded * 1_000L) / timeTakenInMS); + segmentTracker.addUploadBytesPerSec((bytesUploaded * 1_000L) / Math.max(1, timeTakenInMS)); segmentTracker.addUploadTimeMs(timeTakenInMS); } else { segmentTracker.incrementTotalUploadsFailed(); From 4fd6877eef7c9cb0b48268a7505506aeb33278a2 Mon Sep 17 00:00:00 2001 From: luyuncheng Date: Thu, 27 Jul 2023 00:30:33 +0800 Subject: [PATCH 03/75] Replace the deprecated IndexReader APIs with new storedFields() & termVectors() (#7792) * 1. Remove calling deprecated document api Signed-off-by: luyuncheng * 1. Remove calling deprecated document api 2. Fixed some calling and Tests Signed-off-by: luyuncheng * 1. Remove calling deprecated document api 2. Fixed some calling and Tests 3. 
Spotless java Signed-off-by: luyuncheng * add changelog Signed-off-by: luyuncheng * add changelog Signed-off-by: luyuncheng * merge main into branch Signed-off-by: luyuncheng * merge main into branch update CHANGELOG.md Signed-off-by: luyuncheng --------- Signed-off-by: luyuncheng --- CHANGELOG.md | 1 + .../common/lucene/search/XMoreLikeThis.java | 8 +++- .../gateway/PersistedClusterStateService.java | 6 +-- .../index/engine/InternalEngine.java | 4 +- .../index/engine/LuceneChangesSnapshot.java | 2 +- .../index/engine/TranslogLeafReader.java | 41 +++++++++++-------- .../opensearch/index/get/ShardGetService.java | 2 +- .../index/shard/ShardSplittingQuery.java | 2 +- .../index/termvectors/TermVectorsService.java | 7 +++- .../opensearch/search/fetch/FetchPhase.java | 2 +- .../subphase/highlight/HighlightUtils.java | 2 +- .../search/lookup/LeafFieldsLookup.java | 2 +- .../search/lookup/SourceLookup.java | 2 +- .../opensearch/common/lucene/LuceneTests.java | 7 +++- .../lucene/index/FreqTermsEnumTests.java | 4 +- .../index/engine/InternalEngineTests.java | 7 +++- .../RecoverySourcePruneMergePolicyTests.java | 10 +++-- .../query/MoreLikeThisQueryBuilderTests.java | 2 +- .../index/shard/RefreshListenersTests.java | 6 +-- .../index/shard/ShardSplittingQueryTests.java | 2 +- .../indices/IndicesRequestCacheTests.java | 2 +- .../lucene/queries/BlendedTermQueryTests.java | 6 +-- .../search/lookup/LeafFieldsLookupTests.java | 15 +++---- .../slice/DocValuesSliceQueryTests.java | 2 +- .../search/slice/TermsSliceQueryTests.java | 2 +- .../index/engine/EngineTestCase.java | 7 +++- 26 files changed, 92 insertions(+), 61 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e661846a8fc72..a84f5138bd112 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -46,6 +46,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Change http code on create index API with bad input raising NotXContentException from 500 to 400 ([#4773](https://github.com/opensearch-project/OpenSearch/pull/4773)) - Improve summary error message for invalid setting updates ([#4792](https://github.com/opensearch-project/OpenSearch/pull/4792)) - Remote Segment Store Repository setting moved from `index.remote_store.repository` to `index.remote_store.segment.repository` and `cluster.remote_store.repository` to `cluster.remote_store.segment.repository` respectively for Index and Cluster level settings ([#8719](https://github.com/opensearch-project/OpenSearch/pull/8719)) +- Replace the deprecated IndexReader APIs with new storedFields() & termVectors() ([#7792](https://github.com/opensearch-project/OpenSearch/pull/7792)) ### Deprecated diff --git a/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java index d7ffa2df943b7..49148890abd55 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java @@ -56,7 +56,9 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BooleanClause; @@ -808,8 +810,10 @@ public String describeParams() { */ private PriorityQueue retrieveTerms(int docNum) throws 
IOException { Map termFreqMap = new HashMap<>(); + final TermVectors termVectors = ir.termVectors(); + final StoredFields storedFields = ir.storedFields(); for (String fieldName : fieldNames) { - final Fields vectors = ir.getTermVectors(docNum); + final Fields vectors = termVectors.get(docNum); final Terms vector; if (vectors != null) { vector = vectors.terms(fieldName); @@ -819,7 +823,7 @@ private PriorityQueue retrieveTerms(int docNum) throws IOException { // field does not store term vector info if (vector == null) { - Document d = ir.document(docNum); + Document d = storedFields.document(docNum); IndexableField fields[] = d.getFields(fieldName); for (IndexableField field : fields) { final String stringValue = field.stringValue(); diff --git a/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java index 8940b0ed25ed4..caddf92150cff 100644 --- a/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java @@ -45,6 +45,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -507,12 +508,11 @@ private static void consumeFromType(IndexSearcher indexSearcher, String type, Ch final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + final StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { logger.trace("processing doc {}", docIdSetIterator.docID()); - bytesRefConsumer.accept( - leafReaderContext.reader().document(docIdSetIterator.docID()).getBinaryValue(DATA_FIELD_NAME) - ); + bytesRefConsumer.accept(storedFields.document(docIdSetIterator.docID()).getBinaryValue(DATA_FIELD_NAME)); } } } diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 77e2f5cbef7f9..6f8b6d449695e 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -49,6 +49,7 @@ import org.apache.lucene.index.ShuffleForcedMergePolicy; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -2889,6 +2890,7 @@ private void restoreVersionMapAndCheckpointTracker(DirectoryReader directoryRead final CombinedDocValues dv = new CombinedDocValues(leaf.reader()); final IdOnlyFieldVisitor idFieldVisitor = new IdOnlyFieldVisitor(); final DocIdSetIterator iterator = scorer.iterator(); + final StoredFields storedFields = leaf.reader().storedFields(); int docId; while ((docId = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { final long primaryTerm = dv.docPrimaryTerm(docId); @@ -2896,7 +2898,7 @@ private void 
restoreVersionMapAndCheckpointTracker(DirectoryReader directoryRead localCheckpointTracker.markSeqNoAsProcessed(seqNo); localCheckpointTracker.markSeqNoAsPersisted(seqNo); idFieldVisitor.reset(); - leaf.reader().document(docId, idFieldVisitor); + storedFields.document(docId, idFieldVisitor); if (idFieldVisitor.getId() == null) { assert dv.isTombstone(docId); continue; diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index abde2aff6e9e6..23fe59456887e 100644 --- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -289,7 +289,7 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { ? SourceFieldMapper.RECOVERY_SOURCE_NAME : SourceFieldMapper.NAME; final FieldsVisitor fields = new FieldsVisitor(true, sourceField); - leaf.reader().document(segmentDocID, fields); + leaf.reader().storedFields().document(segmentDocID, fields); final Translog.Operation op; final boolean isTombstone = parallelArray.isTombStone[docIndex]; diff --git a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java index 5efafb562df74..417cdd5a8f030 100644 --- a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java +++ b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java @@ -221,28 +221,33 @@ public int maxDoc() { @Override public void document(int docID, StoredFieldVisitor visitor) throws IOException { - if (docID != 0) { - throw new IllegalArgumentException("no such doc ID " + docID); - } - if (visitor.needsField(FAKE_SOURCE_FIELD) == StoredFieldVisitor.Status.YES) { - assert operation.source().toBytesRef().offset == 0; - assert operation.source().toBytesRef().length == operation.source().toBytesRef().bytes.length; - visitor.binaryField(FAKE_SOURCE_FIELD, operation.source().toBytesRef().bytes); - } - if (operation.routing() != null && visitor.needsField(FAKE_ROUTING_FIELD) == StoredFieldVisitor.Status.YES) { - visitor.stringField(FAKE_ROUTING_FIELD, operation.routing()); - } - if (visitor.needsField(FAKE_ID_FIELD) == StoredFieldVisitor.Status.YES) { - BytesRef bytesRef = Uid.encodeId(operation.id()); - final byte[] id = new byte[bytesRef.length]; - System.arraycopy(bytesRef.bytes, bytesRef.offset, id, 0, bytesRef.length); - visitor.binaryField(FAKE_ID_FIELD, id); - } + storedFields().document(docID, visitor); } @Override public StoredFields storedFields() throws IOException { - throw new UnsupportedOperationException(); + return new StoredFields() { + @Override + public void document(int docID, StoredFieldVisitor visitor) throws IOException { + if (docID != 0) { + throw new IllegalArgumentException("no such doc ID " + docID); + } + if (visitor.needsField(FAKE_SOURCE_FIELD) == StoredFieldVisitor.Status.YES) { + assert operation.source().toBytesRef().offset == 0; + assert operation.source().toBytesRef().length == operation.source().toBytesRef().bytes.length; + visitor.binaryField(FAKE_SOURCE_FIELD, operation.source().toBytesRef().bytes); + } + if (operation.routing() != null && visitor.needsField(FAKE_ROUTING_FIELD) == StoredFieldVisitor.Status.YES) { + visitor.stringField(FAKE_ROUTING_FIELD, operation.routing()); + } + if (visitor.needsField(FAKE_ID_FIELD) == StoredFieldVisitor.Status.YES) { + BytesRef bytesRef = Uid.encodeId(operation.id()); + 
final byte[] id = new byte[bytesRef.length]; + System.arraycopy(bytesRef.bytes, bytesRef.offset, id, 0, bytesRef.length); + visitor.binaryField(FAKE_ID_FIELD, id); + } + } + }; } @Override diff --git a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java index a815074119fb1..b3715e097322d 100644 --- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java @@ -276,7 +276,7 @@ private GetResult innerGetLoadFromStoredFields( ); if (fieldVisitor != null) { try { - docIdAndVersion.reader.document(docIdAndVersion.docId, fieldVisitor); + docIdAndVersion.reader.storedFields().document(docIdAndVersion.docId, fieldVisitor); } catch (IOException e) { throw new OpenSearchException("Failed to get id [" + id + "]", e); } diff --git a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java index 6d559a6cc0673..219ead931797a 100644 --- a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java @@ -286,7 +286,7 @@ public Status needsField(FieldInfo fieldInfo) throws IOException { boolean matches(int doc) throws IOException { routing = id = null; leftToVisit = 2; - leafReader.document(doc, this); + leafReader.storedFields().document(doc, this); assert id != null : "docID must not be null - we might have hit a nested document"; int targetShardId = OperationRouting.generateShardId(indexMetadata, id, routing); return targetShardId != shardId; diff --git a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java index 0e9d7c2f14113..4d515fadb5a13 100644 --- a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java @@ -39,6 +39,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.MultiTerms; import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; import org.apache.lucene.index.memory.MemoryIndex; import org.opensearch.OpenSearchException; @@ -127,7 +128,8 @@ static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequ /* or from an existing document */ else if (docIdAndVersion != null) { // fields with stored term vectors - termVectorsByField = docIdAndVersion.reader.getTermVectors(docIdAndVersion.docId); + TermVectors termVectors = docIdAndVersion.reader.termVectors(); + termVectorsByField = termVectors.get(docIdAndVersion.docId); Set selectedFields = request.selectedFields(); // generate tvs for fields where analyzer is overridden if (selectedFields == null && request.perFieldAnalyzer() != null) { @@ -322,7 +324,8 @@ private static Fields generateTermVectors( } } /* and read vectors from it */ - return index.createSearcher().getIndexReader().getTermVectors(0); + TermVectors termVectors = index.createSearcher().getIndexReader().termVectors(); + return termVectors.get(0); } private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException { diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java index 
97a6093ea39d1..67c6eeae02271 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java @@ -160,7 +160,7 @@ public void execute(SearchContext context) { SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader(); fieldReader = lf.getSequentialStoredFieldsReader()::document; } else { - fieldReader = currentReaderContext.reader()::document; + fieldReader = currentReaderContext.reader().storedFields()::document; } for (FetchSubPhaseProcessor processor : processors) { processor.setNextReader(currentReaderContext); diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/HighlightUtils.java b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/HighlightUtils.java index 7a358b7e4b252..2238554a12149 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/HighlightUtils.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -72,7 +72,7 @@ public static List loadFieldValues( ) throws IOException { if (forceSource == false && fieldType.isStored()) { CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false); - hitContext.reader().document(hitContext.docId(), fieldVisitor); + hitContext.reader().storedFields().document(hitContext.docId(), fieldVisitor); List textsToHighlight = fieldVisitor.fields().get(fieldType.name()); return textsToHighlight != null ? textsToHighlight : Collections.emptyList(); } diff --git a/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java b/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java index 007ee49b3e697..47bb8754a5803 100644 --- a/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java @@ -153,7 +153,7 @@ private FieldLookup loadFieldData(String name) { List values = new ArrayList<>(2); SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values); try { - reader.document(docId, visitor); + reader.storedFields().document(docId, visitor); } catch (IOException e) { throw new OpenSearchParseException("failed to load field [{}]", e, name); } diff --git a/server/src/main/java/org/opensearch/search/lookup/SourceLookup.java b/server/src/main/java/org/opensearch/search/lookup/SourceLookup.java index 1341fc0fdabb3..00fdca4e143ee 100644 --- a/server/src/main/java/org/opensearch/search/lookup/SourceLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/SourceLookup.java @@ -140,7 +140,7 @@ public void setSegmentAndDocument(LeafReaderContext context, int docId) { SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) context.reader(); fieldReader = lf.getSequentialStoredFieldsReader()::document; } else { - fieldReader = context.reader()::document; + fieldReader = context.reader().storedFields()::document; } } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java index ef0bedac72541..f7be2c4876e6f 100644 --- a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexFormatTooOldException; import 
org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.document.Document; @@ -565,12 +566,13 @@ public void testWrapAllDocsLive() throws Exception { } try (DirectoryReader unwrapped = DirectoryReader.open(writer)) { DirectoryReader reader = Lucene.wrapAllDocsLive(unwrapped); + StoredFields storedFields = reader.storedFields(); assertThat(reader.numDocs(), equalTo(liveDocs.size())); IndexSearcher searcher = new IndexSearcher(reader); Set actualDocs = new HashSet<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } @@ -609,13 +611,14 @@ public void testWrapLiveDocsNotExposeAbortedDocuments() throws Exception { } try (DirectoryReader unwrapped = DirectoryReader.open(writer)) { DirectoryReader reader = Lucene.wrapAllDocsLive(unwrapped); + StoredFields storedFields = reader.storedFields(); assertThat(reader.maxDoc(), equalTo(numDocs + abortedDocs)); assertThat(reader.numDocs(), equalTo(liveDocs.size())); IndexSearcher searcher = new IndexSearcher(reader); List actualDocs = new ArrayList<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } diff --git a/server/src/test/java/org/opensearch/common/lucene/index/FreqTermsEnumTests.java b/server/src/test/java/org/opensearch/common/lucene/index/FreqTermsEnumTests.java index 60f39208d008a..9423d3c17c98a 100644 --- a/server/src/test/java/org/opensearch/common/lucene/index/FreqTermsEnumTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/index/FreqTermsEnumTests.java @@ -43,6 +43,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.Query; @@ -149,9 +150,10 @@ public void setUp() throws Exception { // now go over each doc, build the relevant references and filter reader = DirectoryReader.open(iw); + StoredFields storedFields = reader.storedFields(); List filterTerms = new ArrayList<>(); for (int docId = 0; docId < reader.maxDoc(); docId++) { - Document doc = reader.document(docId); + Document doc = storedFields.document(docId); addFreqs(doc, referenceAll); if (!deletedIds.contains(doc.getField("id").stringValue())) { addFreqs(doc, referenceNotDeleted); diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 612bebf74bc70..c2ac8b0e1d3b3 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -2541,6 +2541,7 @@ class OpAndVersion { final Term uidTerm = newUid(doc); engine.index(indexForDoc(doc)); final BiFunction searcherFactory = engine::acquireSearcher; + for (int i = 0; i < thread.length; i++) { thread[i] = 
new Thread(() -> { startGun.countDown(); @@ -2549,10 +2550,12 @@ class OpAndVersion { } catch (InterruptedException e) { throw new AssertionError(e); } + for (int op = 0; op < opsPerThread; op++) { try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id(), uidTerm), searcherFactory)) { + FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); String removed = op % 3 == 0 && values.size() > 0 ? values.remove(0) : null; String added = "v_" + idGenerator.incrementAndGet(); @@ -2608,7 +2611,7 @@ class OpAndVersion { try (Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id(), uidTerm), searcherFactory)) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); assertThat(currentValues, equalTo(new HashSet<>(values))); } diff --git a/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java index a8279ed908779..fed521e2d5ed9 100644 --- a/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -49,6 +49,7 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.ShuffleForcedMergePolicy; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; @@ -89,8 +90,9 @@ public void testPruneAll() throws IOException { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); assertEquals(1, document.getFields().size()); assertEquals("source", document.getFields().get(0).name()); } @@ -157,11 +159,12 @@ public void testPruneSome() throws IOException { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("even")); @@ -197,11 +200,12 @@ public void testPruneNone() throws IOException { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = 
reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("extra_source")); diff --git a/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java index e6ec132a10474..2d950f0994976 100644 --- a/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -268,7 +268,7 @@ private static Fields generateFields(String[] fieldNames, String text) throws IO for (String fieldName : fieldNames) { index.addField(fieldName, text, new WhitespaceAnalyzer()); } - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } @Override diff --git a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java index f812312004b21..1be6c07539cb7 100644 --- a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java @@ -36,6 +36,7 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; @@ -369,9 +370,8 @@ public void testLotsOfThreads() throws Exception { try (Engine.GetResult getResult = engine.get(get, engine::acquireSearcher)) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.document( - getResult.docIdAndVersion().docId - ); + StoredFields storedFields = getResult.docIdAndVersion().reader.storedFields(); + org.apache.lucene.document.Document document = storedFields.document(getResult.docIdAndVersion().docId); assertThat(document.getValues("test"), arrayContaining(testFieldValue)); } } catch (Exception t) { diff --git a/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java index 04dcea210640c..940d9a4ead5f9 100644 --- a/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java @@ -326,7 +326,7 @@ void assertSplit(Directory dir, IndexMetadata metadata, int targetShardId, boole } assertEquals(shard_id.docID(), doc); long shardID = shard_id.nextValue(); - BytesRef id = reader.document(doc).getBinaryValue("_id"); + BytesRef id = reader.storedFields().document(doc).getBinaryValue("_id"); String actualId = Uid.decodeId(id.bytes, id.offset, id.length); assertNotEquals(ctx.reader() + " docID: " + doc + " actualID: " + actualId, shardID, targetShardId); } diff --git a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java 
b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java index 3508020cb23d7..a06688150a38a 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java @@ -353,7 +353,7 @@ public BytesReference get() { IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1); assertEquals(1, topDocs.totalHits.value); - Document document = reader.document(topDocs.scoreDocs[0].doc); + Document document = reader.storedFields().document(topDocs.scoreDocs[0].doc); out.writeString(document.get("value")); loadedFromCache = false; return out.bytes(); diff --git a/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java index bf9aac344e3df..6844742759883 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java @@ -112,7 +112,7 @@ public void testDismaxQuery() throws IOException { query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { BooleanQuery.Builder query = new BooleanQuery.Builder(); @@ -134,7 +134,7 @@ public void testDismaxQuery() throws IOException { query.add(gen, BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 4); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(1), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { @@ -269,7 +269,7 @@ public void testMinTTF() throws IOException { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } reader.close(); w.close(); diff --git a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java index 0155e288a96fd..85aacfbd63ee2 100644 --- a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java +++ b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java @@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFieldVisitor; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorSimilarityFunction; import org.opensearch.index.mapper.MappedFieldType; @@ -43,11 +44,11 @@ import org.opensearch.test.OpenSearchTestCase; import org.junit.Before; +import java.io.IOException; import java.util.Collections; import java.util.List; import static org.mockito.Mockito.any; -import static 
org.mockito.Mockito.anyInt; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -88,12 +89,12 @@ public void setUp() throws Exception { ); LeafReader leafReader = mock(LeafReader.class); - doAnswer(invocation -> { - Object[] args = invocation.getArguments(); - StoredFieldVisitor visitor = (StoredFieldVisitor) args[1]; - visitor.doubleField(mockFieldInfo, 2.718); - return null; - }).when(leafReader).document(anyInt(), any(StoredFieldVisitor.class)); + doAnswer(invocation -> new StoredFields() { + @Override + public void document(int docID, StoredFieldVisitor visitor) throws IOException { + visitor.doubleField(mockFieldInfo, 2.718); + } + }).when(leafReader).storedFields(); fieldsLookup = new LeafFieldsLookup(mapperService, leafReader); } diff --git a/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java b/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java index 86dc27d73d21f..4d2a441a180a6 100644 --- a/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java +++ b/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java @@ -110,7 +110,7 @@ public void setScorer(Scorable scorer) throws IOException {} @Override public void collect(int doc) throws IOException { - Document d = context.reader().document(doc, Collections.singleton("uuid")); + Document d = context.reader().storedFields().document(doc, Collections.singleton("uuid")); String uuid = d.get("uuid"); assertThat(keys.contains(uuid), equalTo(true)); keys.remove(uuid); diff --git a/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java b/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java index 84a65b3c8a7cb..8efa4eeef80dd 100644 --- a/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java +++ b/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java @@ -120,7 +120,7 @@ public void setScorer(Scorable scorer) throws IOException {} @Override public void collect(int doc) throws IOException { - Document d = context.reader().document(doc, Collections.singleton("uuid")); + Document d = context.reader().storedFields().document(doc, Collections.singleton("uuid")); String uuid = d.get("uuid"); assertThat(keys.contains(uuid), equalTo(true)); keys.remove(uuid); diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index 9c6f36c87896c..1ac92bbb479c3 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -49,6 +49,7 @@ import org.apache.lucene.index.LiveIndexWriterConfig; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -1302,6 +1303,7 @@ public static List getDocIds(Engine engine, boolean refresh NumericDocValues primaryTermDocValues = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME); NumericDocValues versionDocValues = reader.getNumericDocValues(VersionFieldMapper.NAME); Bits liveDocs = reader.getLiveDocs(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { if (liveDocs == null || 
liveDocs.get(i)) { if (primaryTermDocValues.advanceExact(i) == false) { @@ -1309,7 +1311,7 @@ public static List getDocIds(Engine engine, boolean refresh continue; } final long primaryTerm = primaryTermDocValues.longValue(); - Document doc = reader.document(i, Sets.newHashSet(IdFieldMapper.NAME, SourceFieldMapper.NAME)); + Document doc = storedFields.document(i, Sets.newHashSet(IdFieldMapper.NAME, SourceFieldMapper.NAME)); BytesRef binaryID = doc.getBinaryValue(IdFieldMapper.NAME); String id = Uid.decodeId(Arrays.copyOfRange(binaryID.bytes, binaryID.offset, binaryID.offset + binaryID.length)); final BytesRef source = doc.getBinaryValue(SourceFieldMapper.NAME); @@ -1463,6 +1465,7 @@ public static void assertAtMostOneLuceneDocumentPerSequenceNumber(IndexSettings for (LeafReaderContext leaf : wrappedReader.leaves()) { NumericDocValues primaryTermDocValues = leaf.reader().getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME); NumericDocValues seqNoDocValues = leaf.reader().getNumericDocValues(SeqNoFieldMapper.NAME); + final StoredFields storedFields = leaf.reader().storedFields(); int docId; while ((docId = seqNoDocValues.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { assertTrue(seqNoDocValues.advanceExact(docId)); @@ -1471,7 +1474,7 @@ public static void assertAtMostOneLuceneDocumentPerSequenceNumber(IndexSettings if (primaryTermDocValues.advanceExact(docId)) { if (seqNos.add(seqNo) == false) { final IdOnlyFieldVisitor idFieldVisitor = new IdOnlyFieldVisitor(); - leaf.reader().document(docId, idFieldVisitor); + storedFields.document(docId, idFieldVisitor); throw new AssertionError("found multiple documents for seq=" + seqNo + " id=" + idFieldVisitor.getId()); } } From 4319f2b571c82d7ec5608ffdc31ca4105961d064 Mon Sep 17 00:00:00 2001 From: Marc Handalian Date: Wed, 26 Jul 2023 10:23:15 -0700 Subject: [PATCH 04/75] Remove unnecessary refresh listeners from NRTReplicationReaderManager. (#8859) * Remove unnecessary refresh listeners from NRTReplicationReaderManager. This change removes RefreshListeners used by InternalEngine to provide waitFor functionality. These listeners were previously registered onto NRT replicas only to be force released on the next refresh cycle without actually refreshing the reader. This change also removes the unnecessary blocking refresh from NRTReaderManager because we no longer have conflicting refresh invocations from scheduledRefresh. Signed-off-by: Marc Handalian * Reduce the amount of docs ingested with testPrimaryRelocation and testPrimaryRelocationWithSegRepFailure. These tests were ingesting 100-1k docs and randomly selecting a refresh policy. Wtih the IMMEDIATE refresh policy a blocking refresh is performed that increase the time required for the primary to block operations for relocation. On my machine this change reduces the test time with max docs from 1m to 5-6s. 
Signed-off-by: Marc Handalian --------- Signed-off-by: Marc Handalian --- .../SegmentReplicationRelocationIT.java | 4 ++-- .../index/engine/NRTReplicationEngine.java | 21 ++++++------------- .../engine/NRTReplicationReaderManager.java | 2 +- .../opensearch/index/shard/IndexShard.java | 3 ++- .../SegmentReplicationIndexShardTests.java | 6 ++++++ 5 files changed, 17 insertions(+), 19 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java index 7cf7e5148dd4a..3024eeb798b48 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java @@ -60,7 +60,7 @@ public void testPrimaryRelocation() throws Exception { createIndex(1); final String replica = internalCluster().startNode(); ensureGreen(INDEX_NAME); - final int initialDocCount = scaledRandomIntBetween(100, 1000); + final int initialDocCount = scaledRandomIntBetween(10, 100); final WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); final List> pendingIndexResponses = new ArrayList<>(); for (int i = 0; i < initialDocCount; i++) { @@ -137,7 +137,7 @@ public void testPrimaryRelocationWithSegRepFailure() throws Exception { createIndex(1); final String replica = internalCluster().startNode(); ensureGreen(INDEX_NAME); - final int initialDocCount = scaledRandomIntBetween(100, 1000); + final int initialDocCount = scaledRandomIntBetween(10, 100); final WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); final List> pendingIndexResponses = new ArrayList<>(); for (int i = 0; i < initialDocCount; i++) { diff --git a/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java b/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java index a9f7a2e70884c..b55508b7facd3 100644 --- a/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java @@ -77,9 +77,10 @@ public NRTReplicationEngine(EngineConfig engineConfig) { this.completionStatsCache = new CompletionStatsCache(() -> acquireSearcher("completion_stats")); this.readerManager = readerManager; this.readerManager.addListener(completionStatsCache); - for (ReferenceManager.RefreshListener listener : engineConfig.getExternalRefreshListener()) { - this.readerManager.addListener(listener); - } + // NRT Replicas do not have a concept of Internal vs External reader managers. + // We also do not want to wire up refresh listeners for waitFor & pending refresh location. + // which are the current external listeners set from IndexShard. + // Only wire up the internal listeners. for (ReferenceManager.RefreshListener listener : engineConfig.getInternalRefreshListener()) { this.readerManager.addListener(listener); } @@ -322,22 +323,12 @@ public List segments(boolean verbose) { @Override public void refresh(String source) throws EngineException { - maybeRefresh(source); + // Refresh on this engine should only ever happen in the reader after new segments arrive. 
} @Override public boolean maybeRefresh(String source) throws EngineException { - ensureOpen(); - try { - return readerManager.maybeRefresh(); - } catch (IOException e) { - try { - failEngine("refresh failed source[" + source + "]", e); - } catch (Exception inner) { - e.addSuppressed(inner); - } - throw new RefreshFailedEngineException(shardId, e); - } + return false; } @Override diff --git a/server/src/main/java/org/opensearch/index/engine/NRTReplicationReaderManager.java b/server/src/main/java/org/opensearch/index/engine/NRTReplicationReaderManager.java index 35409437f605a..7b4c93c7235fe 100644 --- a/server/src/main/java/org/opensearch/index/engine/NRTReplicationReaderManager.java +++ b/server/src/main/java/org/opensearch/index/engine/NRTReplicationReaderManager.java @@ -103,7 +103,7 @@ public void updateSegments(SegmentInfos infos) throws IOException { // is always increased. infos.updateGeneration(currentInfos); currentInfos = infos; - maybeRefreshBlocking(); + maybeRefresh(); } public SegmentInfos getSegmentInfos() { diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 8b6d083379fe1..e43b9773cc1e0 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -4423,7 +4423,8 @@ public void addRefreshListener(Translog.Location location, Consumer lis readAllowed = isReadAllowed(); } } - if (readAllowed) { + // NRT Replicas will not accept refresh listeners. + if (readAllowed && isSegmentReplicationAllowed() == false) { refreshListeners.addOrNotify(location, listener); } else { // we're not yet ready fo ready for reads, just ignore refresh cycles diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java index d988e34ef18dc..0c68512f93ea6 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java @@ -120,6 +120,12 @@ public void testReplicationCheckpointNotNullForSegRep() throws IOException { closeShards(indexShard); } + public void testNRTReplicasDoNotAcceptRefreshListeners() throws IOException { + final IndexShard indexShard = newStartedShard(false, settings, new NRTReplicationEngineFactory()); + indexShard.addRefreshListener(mock(Translog.Location.class), Assert::assertFalse); + closeShards(indexShard); + } + public void testSegmentInfosAndReplicationCheckpointTuple() throws Exception { try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { shards.startAll(); From 99f28cb6f6bd73a6d832c6bbb88e5b9c2ccdb139 Mon Sep 17 00:00:00 2001 From: Varun Jain Date: Wed, 26 Jul 2023 11:17:22 -0700 Subject: [PATCH 05/75] Updated Version.java path from server to libs in version.yml (#8883) * Updating Version.java from server/ to buildSrc/ Signed-off-by: Varun Jain * Adding Changelog Signed-off-by: Varun Jain * Path update Signed-off-by: Varun Jain * Changelog Update Signed-off-by: Varun Jain * Removing Changelog from commit Signed-off-by: Varun Jain --------- Signed-off-by: Varun Jain --- .github/workflows/version.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml index fdf42a9a2731e..764a365e7411c 100644 --- 
a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -54,8 +54,8 @@ jobs: echo " - \"$CURRENT_VERSION\"" >> .ci/bwcVersions sed -i "s/opensearch = $CURRENT_VERSION/opensearch = $NEXT_VERSION/g" buildSrc/version.properties echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE - sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" server/src/main/java/org/opensearch/Version.java - sed -i "s/CURRENT = $CURRENT_VERSION_UNDERSCORE;/CURRENT = $NEXT_VERSION_UNDERSCORE;/g" server/src/main/java/org/opensearch/Version.java + sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java + sed -i "s/CURRENT = $CURRENT_VERSION_UNDERSCORE;/CURRENT = $NEXT_VERSION_UNDERSCORE;/g" libs/core/src/main/java/org/opensearch/Version.java - name: Create Pull Request uses: peter-evans/create-pull-request@v3 @@ -82,7 +82,7 @@ jobs: echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE - sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" server/src/main/java/org/opensearch/Version.java + sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java - name: Create Pull Request uses: peter-evans/create-pull-request@v3 From c25c175c94efa4d90a1d98e3c7eac3e4aad6e41f Mon Sep 17 00:00:00 2001 From: Sachin Kale Date: Thu, 27 Jul 2023 08:34:04 +0530 Subject: [PATCH 06/75] [Remote Store] Add support to restore only unassigned shards of an index (#8792) * Add support to restore only unassigned shards of an index --------- Signed-off-by: Sachin Kale Signed-off-by: Sachin Kale Co-authored-by: Sachin Kale --- CHANGELOG.md | 1 + .../RemoteStoreBaseIntegTestCase.java | 10 +- .../remotestore/RemoteStoreForceMergeIT.java | 12 +- .../opensearch/remotestore/RemoteStoreIT.java | 140 ++++++++++++------ .../snapshots/RestoreSnapshotIT.java | 14 +- .../restore/RestoreRemoteStoreRequest.java | 33 ++++- .../cluster/routing/IndexRoutingTable.java | 12 +- .../cluster/routing/RoutingTable.java | 8 +- .../cluster/RestRestoreRemoteStoreAction.java | 1 + .../opensearch/snapshots/RestoreService.java | 44 ++++-- .../RestoreRemoteStoreRequestTests.java | 2 + .../cluster/routing/RoutingTableTests.java | 34 ++++- .../SegmentReplicationIndexShardTests.java | 3 - 13 files changed, 236 insertions(+), 78 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a84f5138bd112..12dae4fca545e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -46,6 +46,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Change http code on create index API with bad input raising NotXContentException from 500 to 400 ([#4773](https://github.com/opensearch-project/OpenSearch/pull/4773)) - Improve summary error message for invalid setting updates 
([#4792](https://github.com/opensearch-project/OpenSearch/pull/4792)) - Remote Segment Store Repository setting moved from `index.remote_store.repository` to `index.remote_store.segment.repository` and `cluster.remote_store.repository` to `cluster.remote_store.segment.repository` respectively for Index and Cluster level settings ([#8719](https://github.com/opensearch-project/OpenSearch/pull/8719)) +- [Remote Store] Add support to restore only unassigned shards of an index ([#8792](https://github.com/opensearch-project/OpenSearch/pull/8792)) - Replace the deprecated IndexReader APIs with new storedFields() & termVectors() ([#7792](https://github.com/opensearch-project/OpenSearch/pull/7792)) ### Deprecated diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java index c5d023bdd7a64..2887fbc56106c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java @@ -26,6 +26,7 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; +import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @@ -37,6 +38,13 @@ public class RemoteStoreBaseIntegTestCase extends OpenSearchIntegTestCase { protected static final int REPLICA_COUNT = 1; protected Path absolutePath; protected Path absolutePath2; + private final List documentKeys = List.of( + randomAlphaOfLength(5), + randomAlphaOfLength(5), + randomAlphaOfLength(5), + randomAlphaOfLength(5), + randomAlphaOfLength(5) + ); @Override protected boolean addMockInternalEngine() { @@ -59,7 +67,7 @@ public Settings indexSettings() { IndexResponse indexSingleDoc(String indexName) { return client().prepareIndex(indexName) .setId(UUIDs.randomBase64UUID()) - .setSource(randomAlphaOfLength(5), randomAlphaOfLength(5)) + .setSource(documentKeys.get(randomIntBetween(0, documentKeys.size() - 1)), randomAlphaOfLength(5)) .get(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java index b4456f887cbaa..4d5648c74ba5c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreForceMergeIT.java @@ -104,9 +104,17 @@ private void testRestoreWithMergeFlow(int numberOfIterations, boolean invokeFlus Map indexStats = indexData(numberOfIterations, invokeFlush, flushAfterMerge, deletedDocs); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME))); - assertAcked(client().admin().indices().prepareClose(INDEX_NAME)); - client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME), PlainActionFuture.newFuture()); + boolean restoreAllShards = randomBoolean(); + if (restoreAllShards) { + assertAcked(client().admin().indices().prepareClose(INDEX_NAME)); + } + client().admin() + .cluster() + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(restoreAllShards), + PlainActionFuture.newFuture() + ); ensureGreen(INDEX_NAME); if (deletedDocs == -1) { diff --git 
a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java index 42bd4b5173fa3..693c4113f8f3b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java @@ -11,6 +11,7 @@ import org.hamcrest.MatcherAssert; import org.junit.Before; import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest; +import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreResponse; import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; import org.opensearch.action.admin.indices.recovery.RecoveryResponse; import org.opensearch.action.index.IndexResponse; @@ -18,7 +19,6 @@ import org.opensearch.cluster.health.ClusterHealthStatus; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.routing.RecoverySource; -import org.opensearch.common.UUIDs; import org.opensearch.common.settings.Settings; import org.opensearch.index.shard.RemoteStoreRefreshListener; import org.opensearch.indices.recovery.RecoveryState; @@ -34,15 +34,16 @@ import java.util.HashMap; import java.util.Map; import java.util.Optional; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import static org.hamcrest.Matchers.comparesEqualTo; -import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; +import static org.hamcrest.Matchers.comparesEqualTo; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; -@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 0) public class RemoteStoreIT extends RemoteStoreBaseIntegTestCase { private static final String INDEX_NAME = "remote-store-test-idx-1"; @@ -68,13 +69,6 @@ public Settings indexSettings() { return remoteStoreIndexSettings(0); } - private IndexResponse indexSingleDoc() { - return client().prepareIndex(INDEX_NAME) - .setId(UUIDs.randomBase64UUID()) - .setSource(randomAlphaOfLength(5), randomAlphaOfLength(5)) - .get(); - } - private Map indexData(int numberOfIterations, boolean invokeFlush, String index) { long totalOperations = 0; long refreshedOrFlushedOperations = 0; @@ -93,7 +87,7 @@ private Map indexData(int numberOfIterations, boolean invokeFlush, refreshedOrFlushedOperations = totalOperations; int numberOfOperations = randomIntBetween(20, 50); for (int j = 0; j < numberOfOperations; j++) { - IndexResponse response = INDEX_NAME.equals(index) ? indexSingleDoc() : indexSingleDoc(index); + IndexResponse response = indexSingleDoc(index); maxSeqNo = response.getSeqNo(); shardId = response.getShardId().id(); indexingStats.put(MAX_SEQ_NO_TOTAL + "-shard-" + shardId, maxSeqNo); @@ -109,12 +103,14 @@ private Map indexData(int numberOfIterations, boolean invokeFlush, } private void verifyRestoredData(Map indexStats, boolean checkTotal, String indexName) { + // This is required to get updated number from already active shards which were not restored + refresh(indexName); String statsGranularity = checkTotal ? TOTAL_OPERATIONS : REFRESHED_OR_FLUSHED_OPERATIONS; String maxSeqNoGranularity = checkTotal ? 
MAX_SEQ_NO_TOTAL : MAX_SEQ_NO_REFRESHED_OR_FLUSHED; ensureYellowAndNoInitializingShards(indexName); ensureGreen(indexName); assertHitCount(client().prepareSearch(indexName).setSize(0).get(), indexStats.get(statsGranularity)); - IndexResponse response = INDEX_NAME.equals(indexName) ? indexSingleDoc() : indexSingleDoc(indexName); + IndexResponse response = indexSingleDoc(indexName); assertEquals(indexStats.get(maxSeqNoGranularity + "-shard-" + response.getShardId().id()) + 1, response.getSeqNo()); refresh(indexName); assertHitCount(client().prepareSearch(indexName).setSize(0).get(), indexStats.get(statsGranularity) + 1); @@ -130,6 +126,28 @@ private void prepareCluster(int numClusterManagerNodes, int numDataOnlyNodes, St } } + private void restore(String... indices) { + boolean restoreAllShards = randomBoolean(); + if (restoreAllShards) { + assertAcked(client().admin().indices().prepareClose(indices)); + } + client().admin() + .cluster() + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(indices).restoreAllShards(restoreAllShards), + PlainActionFuture.newFuture() + ); + } + + private void restoreAndVerify(int shardCount, int replicaCount, Map indexStats) { + restore(INDEX_NAME); + ensureGreen(INDEX_NAME); + // This is required to get updated number from already active shards which were not restored + assertEquals(shardCount * (1 + replicaCount), getNumShards(INDEX_NAME).totalNumShards); + assertEquals(replicaCount, getNumShards(INDEX_NAME).numReplicas); + verifyRestoredData(indexStats, true, INDEX_NAME); + } + /** * Helper function to test restoring an index with no replication from remote store. Only primary node is dropped. * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data. @@ -144,23 +162,16 @@ private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, int sh internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME))); ensureRed(INDEX_NAME); - assertAcked(client().admin().indices().prepareClose(INDEX_NAME)); - client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME), PlainActionFuture.newFuture()); - - ensureGreen(INDEX_NAME); - assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); - verifyRestoredData(indexStats, true, INDEX_NAME); + restoreAndVerify(shardCount, 0, indexStats); } /** * Helper function to test restoring an index having replicas from remote store when all the nodes housing the primary/replica drop. - * @param remoteTranslog If true, Remote Translog Store is also enabled in addition to Remote Segment Store. * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data. * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked. * @throws IOException IO Exception. 
*/ - private void testRestoreFlowBothPrimaryReplicasDown(boolean remoteTranslog, int numberOfIterations, boolean invokeFlush, int shardCount) - throws IOException { + private void testRestoreFlowBothPrimaryReplicasDown(int numberOfIterations, boolean invokeFlush, int shardCount) throws IOException { prepareCluster(1, 2, INDEX_NAME, 1, shardCount); Map indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME); assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); @@ -170,14 +181,7 @@ private void testRestoreFlowBothPrimaryReplicasDown(boolean remoteTranslog, int ensureRed(INDEX_NAME); internalCluster().startDataOnlyNodes(2); - assertAcked(client().admin().indices().prepareClose(INDEX_NAME)); - client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME), PlainActionFuture.newFuture()); - - ensureGreen(INDEX_NAME); - - assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); - assertEquals(0, getNumShards(INDEX_NAME).numReplicas); - verifyRestoredData(indexStats, true, INDEX_NAME); + restoreAndVerify(shardCount, 1, indexStats); } /** @@ -212,10 +216,16 @@ private void testRestoreFlowMultipleIndices(int numberOfIterations, boolean invo ensureRed(indices); internalCluster().startDataOnlyNodes(3); - assertAcked(client().admin().indices().prepareClose(indices)); + boolean restoreAllShards = randomBoolean(); + if (restoreAllShards) { + assertAcked(client().admin().indices().prepareClose(indices)); + } client().admin() .cluster() - .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAMES_WILDCARD.split(",")), PlainActionFuture.newFuture()); + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(INDEX_NAMES_WILDCARD.split(",")).restoreAllShards(restoreAllShards), + PlainActionFuture.newFuture() + ); ensureGreen(indices); for (String index : indices) { assertEquals(shardCount, getNumShards(index).totalNumShards); @@ -223,6 +233,37 @@ private void testRestoreFlowMultipleIndices(int numberOfIterations, boolean invo } } + public void testRestoreFlowAllShardsNoRedIndex() throws InterruptedException { + int shardCount = randomIntBetween(1, 5); + prepareCluster(0, 3, INDEX_NAME, 0, shardCount); + indexData(randomIntBetween(2, 5), true, INDEX_NAME); + assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); + + PlainActionFuture future = PlainActionFuture.newFuture(); + client().admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(true), future); + try { + future.get(); + } catch (ExecutionException e) { + // If the request goes to co-ordinator, e.getCause() can be RemoteTransportException + assertTrue(e.getCause() instanceof IllegalStateException || e.getCause().getCause() instanceof IllegalStateException); + } + } + + public void testRestoreFlowNoRedIndex() { + int shardCount = randomIntBetween(1, 5); + prepareCluster(0, 3, INDEX_NAME, 0, shardCount); + Map indexStats = indexData(randomIntBetween(2, 5), true, INDEX_NAME); + assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); + + client().admin() + .cluster() + .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(false), PlainActionFuture.newFuture()); + + ensureGreen(INDEX_NAME); + assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); + verifyRestoredData(indexStats, true, INDEX_NAME); + } + /** * Simulates all data restored using Remote Translog Store. * @throws IOException IO Exception. 
@@ -265,7 +306,7 @@ public void testRemoteTranslogRestoreWithCommittedData() throws IOException { // @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/6188") @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479") public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws IOException { - testRestoreFlowBothPrimaryReplicasDown(true, 1, true, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(1, true, randomIntBetween(1, 5)); } /** @@ -274,7 +315,7 @@ public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws IOExce */ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479") public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws IOException { - testRestoreFlowBothPrimaryReplicasDown(true, 1, false, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(1, false, randomIntBetween(1, 5)); } /** @@ -284,7 +325,7 @@ public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws IOExc */ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479") public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws IOException { - testRestoreFlowBothPrimaryReplicasDown(true, randomIntBetween(2, 5), false, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), false, randomIntBetween(1, 5)); } /** @@ -294,7 +335,7 @@ public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws IOExcepti */ @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8479") public void testRTSRestoreWithCommittedDataPrimaryReplicaDown() throws IOException { - testRestoreFlowBothPrimaryReplicasDown(true, randomIntBetween(2, 5), true, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), true, randomIntBetween(1, 5)); } /** @@ -341,10 +382,7 @@ public void testRTSRestoreWithCommittedDataDefaultAllIndices() throws IOExceptio ensureRed(indices); internalCluster().startDataOnlyNodes(3); - assertAcked(client().admin().indices().prepareClose(indices)); - client().admin() - .cluster() - .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(new String[] {}), PlainActionFuture.newFuture()); + restore(indices); ensureGreen(indices); for (String index : indices) { @@ -381,10 +419,16 @@ public void testRTSRestoreWithCommittedDataNotAllRedRemoteIndices() throws IOExc ensureRed(indices); internalCluster().startDataOnlyNodes(3); - assertAcked(client().admin().indices().prepareClose(indices[0], indices[1])); + boolean restoreAllShards = randomBoolean(); + if (restoreAllShards) { + assertAcked(client().admin().indices().prepareClose(indices[0], indices[1])); + } client().admin() .cluster() - .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indices[0], indices[1]), PlainActionFuture.newFuture()); + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(indices[0], indices[1]).restoreAllShards(restoreAllShards), + PlainActionFuture.newFuture() + ); ensureGreen(indices[0], indices[1]); assertEquals(shardCount, getNumShards(indices[0]).totalNumShards); verifyRestoredData(indicesStats.get(indices[0]), true, indices[0]); @@ -427,10 +471,16 @@ public void testRTSRestoreWithCommittedDataExcludeIndicesPatterns() throws IOExc ensureRed(indices); internalCluster().startDataOnlyNodes(3); - assertAcked(client().admin().indices().prepareClose(indices[0], indices[1])); + boolean restoreAllShards = randomBoolean(); + if 
(restoreAllShards) { + assertAcked(client().admin().indices().prepareClose(indices[0], indices[1])); + } client().admin() .cluster() - .restoreRemoteStore(new RestoreRemoteStoreRequest().indices("*", "-remote-store-test-index-*"), PlainActionFuture.newFuture()); + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices("*", "-remote-store-test-index-*").restoreAllShards(restoreAllShards), + PlainActionFuture.newFuture() + ); ensureGreen(indices[0], indices[1]); assertEquals(shardCount, getNumShards(indices[0]).totalNumShards); verifyRestoredData(indicesStats.get(indices[0]), true, indices[0]); @@ -490,7 +540,7 @@ private void testPeerRecovery(int numberOfIterations, boolean invokeFlush) throw assertEquals(0, recoverySource.get().getIndex().recoveredFileCount()); } - IndexResponse response = indexSingleDoc(); + IndexResponse response = indexSingleDoc(INDEX_NAME); assertEquals(indexStats.get(MAX_SEQ_NO_TOTAL) + 1, response.getSeqNo()); refresh(INDEX_NAME); assertBusy( diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java index dbd96a7fd109f..30a836b41e29e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java @@ -276,7 +276,10 @@ public void testRestoreOperationsShallowCopyEnabled() throws IOException, Execut assertAcked(client.admin().indices().prepareClose(restoredIndexName1)); client.admin() .cluster() - .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(restoredIndexName1), PlainActionFuture.newFuture()); + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true), + PlainActionFuture.newFuture() + ); ensureYellowAndNoInitializingShards(restoredIndexName1); ensureGreen(restoredIndexName1); assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1); @@ -434,7 +437,9 @@ public void testRestoreInSameRemoteStoreEnabledIndex() throws IOException { // Re-initialize client to make sure we are not using client from stopped node. 
client = client(clusterManagerNode); assertAcked(client.admin().indices().prepareClose(indexName1)); - client.admin().cluster().restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indexName1), PlainActionFuture.newFuture()); + client.admin() + .cluster() + .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indexName1).restoreAllShards(true), PlainActionFuture.newFuture()); ensureYellowAndNoInitializingShards(indexName1); ensureGreen(indexName1); assertDocsPresentInIndex(client(), indexName1, numDocsInIndex1); @@ -515,7 +520,10 @@ public void testRestoreShallowCopySnapshotWithDifferentRepo() throws IOException assertAcked(client.admin().indices().prepareClose(restoredIndexName1)); client.admin() .cluster() - .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(restoredIndexName1), PlainActionFuture.newFuture()); + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true), + PlainActionFuture.newFuture() + ); ensureYellowAndNoInitializingShards(restoredIndexName1); ensureGreen(restoredIndexName1); // indexing some new docs and validating diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java index 703b9575a88ad..eb1935158c231 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java @@ -35,7 +35,8 @@ public class RestoreRemoteStoreRequest extends ClusterManagerNodeRequest implements ToXContentObject { private String[] indices = Strings.EMPTY_ARRAY; - private Boolean waitForCompletion; + private Boolean waitForCompletion = false; + private Boolean restoreAllShards = false; public RestoreRemoteStoreRequest() {} @@ -43,6 +44,7 @@ public RestoreRemoteStoreRequest(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); waitForCompletion = in.readOptionalBoolean(); + restoreAllShards = in.readOptionalBoolean(); } @Override @@ -50,6 +52,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArray(indices); out.writeOptionalBoolean(waitForCompletion); + out.writeOptionalBoolean(restoreAllShards); } @Override @@ -118,6 +121,27 @@ public boolean waitForCompletion() { return waitForCompletion; } + /** + * Set the value for restoreAllShards, denoting whether to restore all shards or only unassigned shards + * + * @param restoreAllShards If true, the operation will restore all the shards of the given indices. + * If false, the operation will restore only the unassigned shards of the given indices. 
+ * @return this request + */ + public RestoreRemoteStoreRequest restoreAllShards(boolean restoreAllShards) { + this.restoreAllShards = restoreAllShards; + return this; + } + + /** + * Returns restoreAllShards setting + * + * @return true if the operation will restore all the shards of the given indices + */ + public boolean restoreAllShards() { + return restoreAllShards; + } + /** * Parses restore definition * @@ -167,12 +191,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RestoreRemoteStoreRequest that = (RestoreRemoteStoreRequest) o; - return waitForCompletion == that.waitForCompletion && Arrays.equals(indices, that.indices); + return waitForCompletion == that.waitForCompletion + && restoreAllShards == that.restoreAllShards + && Arrays.equals(indices, that.indices); } @Override public int hashCode() { - int result = Objects.hash(waitForCompletion); + int result = Objects.hash(waitForCompletion, restoreAllShards); result = 31 * result + Arrays.hashCode(indices); return result; } @@ -181,4 +207,5 @@ public int hashCode() { public String toString() { return org.opensearch.common.Strings.toString(XContentType.JSON, this); } + } diff --git a/server/src/main/java/org/opensearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/opensearch/cluster/routing/IndexRoutingTable.java index af348c1c98f2d..781ca5bb2255a 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/IndexRoutingTable.java +++ b/server/src/main/java/org/opensearch/cluster/routing/IndexRoutingTable.java @@ -450,7 +450,11 @@ public Builder initializeAsRestore(IndexMetadata indexMetadata, SnapshotRecovery /** * Initializes an existing index, to be restored from remote store */ - public Builder initializeAsRemoteStoreRestore(IndexMetadata indexMetadata, RemoteStoreRecoverySource recoverySource) { + public Builder initializeAsRemoteStoreRestore( + IndexMetadata indexMetadata, + RemoteStoreRecoverySource recoverySource, + Map activeInitializingShards + ) { final UnassignedInfo unassignedInfo = new UnassignedInfo( UnassignedInfo.Reason.EXISTING_INDEX_RESTORED, "restore_source[remote_store]" @@ -462,7 +466,11 @@ public Builder initializeAsRemoteStoreRestore(IndexMetadata indexMetadata, Remot for (int shardNumber = 0; shardNumber < indexMetadata.getNumberOfShards(); shardNumber++) { ShardId shardId = new ShardId(index, shardNumber); IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId); - indexShardRoutingBuilder.addShard(ShardRouting.newUnassigned(shardId, true, recoverySource, unassignedInfo)); + if (activeInitializingShards.containsKey(shardId)) { + indexShardRoutingBuilder.addShard(activeInitializingShards.get(shardId)); + } else { + indexShardRoutingBuilder.addShard(ShardRouting.newUnassigned(shardId, true, recoverySource, unassignedInfo)); + } shards.put(shardNumber, indexShardRoutingBuilder.build()); } return this; diff --git a/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java index 1bee5d8176a0f..7934649a6d3eb 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java @@ -562,9 +562,13 @@ public Builder addAsFromOpenToClose(IndexMetadata indexMetadata) { return add(indexRoutingBuilder); } - public Builder addAsRemoteStoreRestore(IndexMetadata indexMetadata, RemoteStoreRecoverySource 
recoverySource) { + public Builder addAsRemoteStoreRestore( + IndexMetadata indexMetadata, + RemoteStoreRecoverySource recoverySource, + Map activeInitializingShards + ) { IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetadata.getIndex()) - .initializeAsRemoteStoreRestore(indexMetadata, recoverySource); + .initializeAsRemoteStoreRestore(indexMetadata, recoverySource, activeInitializingShards); add(indexRoutingBuilder); return this; } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreRemoteStoreAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreRemoteStoreAction.java index fca6745167bb4..414c82b4a470f 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreRemoteStoreAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRestoreRemoteStoreAction.java @@ -44,6 +44,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC request.paramAsTime("cluster_manager_timeout", restoreRemoteStoreRequest.masterNodeTimeout()) ); restoreRemoteStoreRequest.waitForCompletion(request.paramAsBoolean("wait_for_completion", false)); + restoreRemoteStoreRequest.restoreAllShards(request.paramAsBoolean("restore_all_shards", false)); request.applyContentParser(p -> restoreRemoteStoreRequest.source(p.mapOrdered())); return channel -> client.admin().cluster().restoreRemoteStore(restoreRemoteStoreRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/opensearch/snapshots/RestoreService.java b/server/src/main/java/org/opensearch/snapshots/RestoreService.java index 54d55f67ccdcd..d7e89172c5837 100644 --- a/server/src/main/java/org/opensearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/opensearch/snapshots/RestoreService.java @@ -62,6 +62,7 @@ import org.opensearch.cluster.metadata.MetadataIndexUpgradeService; import org.opensearch.cluster.metadata.RepositoriesMetadata; import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.routing.IndexShardRoutingTable; import org.opensearch.cluster.routing.RecoverySource; import org.opensearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.opensearch.cluster.routing.RecoverySource.RemoteStoreRecoverySource; @@ -234,21 +235,34 @@ public ClusterState execute(ClusterState currentState) { continue; } if (currentIndexMetadata.getSettings().getAsBoolean(SETTING_REMOTE_STORE_ENABLED, false)) { - if (currentIndexMetadata.getState() != IndexMetadata.State.CLOSE) { - throw new IllegalStateException( - "cannot restore index [" - + index - + "] because an open index " - + "with same name already exists in the cluster. Close the existing index" - ); + IndexMetadata updatedIndexMetadata = currentIndexMetadata; + Map activeInitializingShards = new HashMap<>(); + if (request.restoreAllShards()) { + if (currentIndexMetadata.getState() != IndexMetadata.State.CLOSE) { + throw new IllegalStateException( + "cannot restore index [" + + index + + "] because an open index " + + "with same name already exists in the cluster. 
Close the existing index" + ); + } + updatedIndexMetadata = IndexMetadata.builder(currentIndexMetadata) + .state(IndexMetadata.State.OPEN) + .version(1 + currentIndexMetadata.getVersion()) + .mappingVersion(1 + currentIndexMetadata.getMappingVersion()) + .settingsVersion(1 + currentIndexMetadata.getSettingsVersion()) + .aliasesVersion(1 + currentIndexMetadata.getAliasesVersion()) + .build(); + } else { + activeInitializingShards = currentState.routingTable() + .index(index) + .shards() + .values() + .stream() + .map(IndexShardRoutingTable::primaryShard) + .filter(shardRouting -> shardRouting.unassigned() == false) + .collect(Collectors.toMap(ShardRouting::shardId, Function.identity())); } - IndexMetadata updatedIndexMetadata = IndexMetadata.builder(currentIndexMetadata) - .state(IndexMetadata.State.OPEN) - .version(1 + currentIndexMetadata.getVersion()) - .mappingVersion(1 + currentIndexMetadata.getMappingVersion()) - .settingsVersion(1 + currentIndexMetadata.getSettingsVersion()) - .aliasesVersion(1 + currentIndexMetadata.getAliasesVersion()) - .build(); IndexId indexId = new IndexId(index, updatedIndexMetadata.getIndexUUID()); @@ -257,7 +271,7 @@ public ClusterState execute(ClusterState currentState) { updatedIndexMetadata.getCreationVersion(), indexId ); - rtBuilder.addAsRemoteStoreRestore(updatedIndexMetadata, recoverySource); + rtBuilder.addAsRemoteStoreRestore(updatedIndexMetadata, recoverySource, activeInitializingShards); blocks.updateBlocks(updatedIndexMetadata); mdBuilder.put(updatedIndexMetadata, true); indicesToBeRestored.add(index); diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequestTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequestTests.java index 81d7074977253..2edfa23286658 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequestTests.java @@ -38,6 +38,7 @@ private RestoreRemoteStoreRequest randomState(RestoreRemoteStoreRequest instance } instance.waitForCompletion(randomBoolean()); + instance.restoreAllShards(randomBoolean()); if (randomBoolean()) { instance.masterNodeTimeout(randomTimeValue()); @@ -76,6 +77,7 @@ public void testSource() throws IOException { RestoreRemoteStoreRequest processed = new RestoreRemoteStoreRequest(); processed.masterNodeTimeout(original.masterNodeTimeout()); processed.waitForCompletion(original.waitForCompletion()); + processed.restoreAllShards(original.restoreAllShards()); processed.source(map); assertEquals(original, processed); diff --git a/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java b/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java index 6b869ffed7d23..0ff9d6f07751a 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java @@ -49,10 +49,13 @@ import org.junit.Before; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import java.util.function.Predicate; +import static org.mockito.Mockito.mock; import static org.opensearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -502,13 +505,40 @@ public 
void testAddAsRemoteStoreRestore() { Version.CURRENT, new IndexId(TEST_INDEX_1, "1") ); - final RoutingTable routingTable = new RoutingTable.Builder().addAsRemoteStoreRestore(indexMetadata, remoteStoreRecoverySource) - .build(); + final RoutingTable routingTable = new RoutingTable.Builder().addAsRemoteStoreRestore( + indexMetadata, + remoteStoreRecoverySource, + new HashMap<>() + ).build(); assertTrue(routingTable.hasIndex(TEST_INDEX_1)); assertEquals(this.numberOfShards, routingTable.allShards(TEST_INDEX_1).size()); assertEquals(this.numberOfShards, routingTable.index(TEST_INDEX_1).shardsWithState(UNASSIGNED).size()); } + public void testAddAsRemoteStoreRestoreWithActiveShards() { + final IndexMetadata indexMetadata = createIndexMetadata(TEST_INDEX_1).state(IndexMetadata.State.OPEN).build(); + final RemoteStoreRecoverySource remoteStoreRecoverySource = new RemoteStoreRecoverySource( + "restore_uuid", + Version.CURRENT, + new IndexId(TEST_INDEX_1, "1") + ); + Map activeInitializingShards = new HashMap<>(); + for (int i = 0; i < randomIntBetween(1, this.numberOfShards); i++) { + activeInitializingShards.put(new ShardId(indexMetadata.getIndex(), i), mock(ShardRouting.class)); + } + final RoutingTable routingTable = new RoutingTable.Builder().addAsRemoteStoreRestore( + indexMetadata, + remoteStoreRecoverySource, + activeInitializingShards + ).build(); + assertTrue(routingTable.hasIndex(TEST_INDEX_1)); + assertEquals(this.numberOfShards, routingTable.allShards(TEST_INDEX_1).size()); + assertEquals( + this.numberOfShards - activeInitializingShards.size(), + routingTable.index(TEST_INDEX_1).shardsWithState(UNASSIGNED).size() + ); + } + /** reverse engineer the in sync aid based on the given indexRoutingTable **/ public static IndexMetadata updateActiveAllocations(IndexRoutingTable indexRoutingTable, IndexMetadata indexMetadata) { IndexMetadata.Builder imdBuilder = IndexMetadata.builder(indexMetadata); diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java index 0c68512f93ea6..cc4fa6f28bafc 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java @@ -190,7 +190,6 @@ public void testIsSegmentReplicationAllowed_WrongEngineType() throws IOException * reader close operation on replica shard deletes the segment files copied in current round of segment replication. * It does this by blocking the finalizeReplication on replica shard and performing close operation on acquired * searcher that triggers the reader close operation. - * @throws Exception */ public void testSegmentReplication_With_ReaderClosedConcurrently() throws Exception { String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; @@ -240,7 +239,6 @@ public void testSegmentReplication_With_ReaderClosedConcurrently() throws Except /** * Similar to test above, this test shows the issue where an engine close operation during active segment replication * can result in Lucene CorruptIndexException. 
- * @throws Exception */ public void testSegmentReplication_With_EngineClosedConcurrently() throws Exception { String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; @@ -289,7 +287,6 @@ public void testSegmentReplication_With_EngineClosedConcurrently() throws Except /** * Verifies that commits on replica engine resulting from engine or reader close does not cleanup the temporary * replication files from ongoing round of segment replication - * @throws Exception */ public void testTemporaryFilesNotCleanup() throws Exception { String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; From 91bc891c1e771dda8742105051731247e4198c73 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Thu, 27 Jul 2023 13:30:36 -0400 Subject: [PATCH 07/75] Add 2.9.1 to BWC and known versions (#8923) (#8929) Signed-off-by: Andriy Redko (cherry picked from commit 212dba457d6b2d540694e040a0485529d6e52e12) --- .ci/bwcVersions | 1 + libs/core/src/main/java/org/opensearch/Version.java | 1 + 2 files changed, 2 insertions(+) diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 31b1cb5efe3a1..8cfd636b9fd2e 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -21,4 +21,5 @@ BWC_VERSION: - "2.8.0" - "2.8.1" - "2.9.0" + - "2.9.1" - "2.10.0" diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index 9329f221922ea..3f83282245fd8 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -90,6 +90,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_2_8_0 = new Version(2080099, org.apache.lucene.util.Version.LUCENE_9_6_0); public static final Version V_2_8_1 = new Version(2080199, org.apache.lucene.util.Version.LUCENE_9_6_0); public static final Version V_2_9_0 = new Version(2090099, org.apache.lucene.util.Version.LUCENE_9_7_0); + public static final Version V_2_9_1 = new Version(2090199, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_10_0 = new Version(2100099, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_8_0); public static final Version CURRENT = V_3_0_0; From e2a664c49d5a182a2351293ec56bf9dc9b3311d9 Mon Sep 17 00:00:00 2001 From: Andrew Ross Date: Thu, 27 Jul 2023 16:38:44 -0500 Subject: [PATCH 08/75] Fix flakiness in MasterServiceTests.testThrottlingForMultipleTaskTypes (#8901) * Fix flakiness in MasterServiceTests.testThrottlingForMultipleTaskTypes The test configured a [timeout duration of zero][1] for certain tasks and asserted that all tasks were throttled or timed out. This is not a valid assertion because it is possible for a task to complete before the [asynchronous timeout operation runs][2], which means the task would complete successfully. The fix is to adjust the assertion to allow for successful tasks in this case. 
[1]: https://github.com/opensearch-project/OpenSearch/blob/60985bc300d9eafd36c1ab25d46235e1c925c565/server/src/test/java/org/opensearch/cluster/service/MasterServiceTests.java#L941 [2]: https://github.com/opensearch-project/OpenSearch/blob/9fc3f4096958159ec9b53012fc7ced19fd793e1b/server/src/main/java/org/opensearch/common/util/concurrent/PrioritizedOpenSearchThreadPoolExecutor.java#L266 Signed-off-by: Andrew Ross * Add a deterministic test case for timeout Signed-off-by: Andrew Ross --------- Signed-off-by: Andrew Ross --- .../cluster/service/MasterServiceTests.java | 77 ++++++++++++++++++- 1 file changed, 76 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/opensearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/opensearch/cluster/service/MasterServiceTests.java index 3c27748daa87d..d4804b18bd160 100644 --- a/server/src/test/java/org/opensearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/service/MasterServiceTests.java @@ -86,6 +86,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import com.carrotsearch.randomizedtesting.annotations.Timeout; + import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.anyOf; @@ -863,6 +865,7 @@ public ClusterManagerTaskThrottler.ThrottlingKey getClusterManagerThrottlingKey( AtomicInteger throttledTask3 = new AtomicInteger(); AtomicInteger succeededTask1 = new AtomicInteger(); AtomicInteger succeededTask2 = new AtomicInteger(); + AtomicInteger succeededTask3 = new AtomicInteger(); AtomicInteger timedOutTask3 = new AtomicInteger(); final ClusterStateTaskListener listener = new ClusterStateTaskListener() { @@ -880,6 +883,8 @@ public void clusterStateProcessed(String source, ClusterState oldState, ClusterS succeededTask1.incrementAndGet(); } else if (source.equals(task2)) { succeededTask2.incrementAndGet(); + } else if (source.equals(task3)) { + succeededTask3.incrementAndGet(); } latch.countDown(); } @@ -955,7 +960,7 @@ public void run() { assertEquals(numberOfTask1, throttledTask1.get() + succeededTask1.get()); assertEquals(numberOfTask2, succeededTask2.get()); assertEquals(0, throttledTask2.get()); - assertEquals(numberOfTask3, throttledTask3.get() + timedOutTask3.get()); + assertEquals(numberOfTask3, throttledTask3.get() + timedOutTask3.get() + succeededTask3.get()); masterService.close(); } @@ -1378,6 +1383,76 @@ public void testDeprecatedMasterServiceUpdateTaskThreadName() { assertThrows(AssertionError.class, () -> MasterService.assertClusterManagerUpdateThread()); } + @Timeout(millis = 5_000) + public void testTaskTimeout() throws InterruptedException { + try (ClusterManagerService clusterManagerService = createClusterManagerService(true)) { + final AtomicInteger failureCount = new AtomicInteger(); + final AtomicInteger successCount = new AtomicInteger(); + final CountDownLatch taskStartLatch = new CountDownLatch(1); + final CountDownLatch blockingTaskLatch = new CountDownLatch(1); + final CountDownLatch timeoutLatch = new CountDownLatch(1); + final ClusterStateTaskListener blockingListener = new ClusterStateTaskListener() { + @Override + public void onFailure(String source, Exception e) { + fail("Unexpected failure"); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + successCount.incrementAndGet(); + taskStartLatch.countDown(); + try { + blockingTaskLatch.await(); + } catch 
(InterruptedException e) { + fail("Interrupted"); + } + } + }; + final ClusterStateTaskListener timeoutListener = new ClusterStateTaskListener() { + @Override + public void onFailure(String source, Exception e) { + assertEquals("timeout", source); + failureCount.incrementAndGet(); + timeoutLatch.countDown(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + fail("Unexpected success"); + } + }; + + final ClusterStateTaskExecutor executor = (currentState, tasks) -> ClusterStateTaskExecutor.ClusterTasksResult.builder() + .successes(tasks) + .build(currentState); + + // start a task and wait for it to start and block on the clusterStateProcessed callback + clusterManagerService.submitStateUpdateTask( + "success", + new Object(), + ClusterStateTaskConfig.build(randomFrom(Priority.values())), + executor, + blockingListener + ); + taskStartLatch.await(); + + // start a second task that is guaranteed to timeout as the first task is still running + clusterManagerService.submitStateUpdateTask( + "timeout", + new Object(), + ClusterStateTaskConfig.build(randomFrom(Priority.values()), TimeValue.timeValueMillis(1L)), + executor, + timeoutListener + ); + + // wait for the timeout to happen, then unblock and assert one success and one failure + timeoutLatch.await(); + blockingTaskLatch.countDown(); + assertEquals(1, failureCount.get()); + assertEquals(1, successCount.get()); + } + } + /** * Returns the cluster state that the cluster-manager service uses (and that is provided by the discovery layer) */ From 4ad418210a51c518119a4c9c565fbf7e9bc4b5c1 Mon Sep 17 00:00:00 2001 From: Marc Handalian Date: Thu, 27 Jul 2023 15:17:15 -0700 Subject: [PATCH 09/75] Fix SegmentReplicationIT.testReplicaHasDiffFilesThanPrimary for node-node replication (#8912) * Fix SegmentReplicationIT.testReplicaHasDiffFilesThanPrimary This test is now failing for node-node replication. On the primary shard the prepareSegmentReplication method should cancel any ongoing replication if it is running and start a new sync. This is incorrectly using Map.compute which will not replace the existing handler entry in the allocationIdToHandlers map. It will only cancel the existing source handler. As a result this can leave the copyState map with an entry and hold an index commit while the test is cleaning up. The copyState is only cleared when a handler is cancelled directly or from a cluster state update. Signed-off-by: Marc Handalian * PR feedback.
Signed-off-by: Marc Handalian --------- Signed-off-by: Marc Handalian --- .../OngoingSegmentReplications.java | 26 ++++++++++---- .../OngoingSegmentReplicationsTests.java | 34 +++++++++++++++++++ 2 files changed, 53 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/opensearch/indices/replication/OngoingSegmentReplications.java b/server/src/main/java/org/opensearch/indices/replication/OngoingSegmentReplications.java index e0e356f1531e1..4712ae6c18759 100644 --- a/server/src/main/java/org/opensearch/indices/replication/OngoingSegmentReplications.java +++ b/server/src/main/java/org/opensearch/indices/replication/OngoingSegmentReplications.java @@ -139,13 +139,25 @@ void startSegmentCopy(GetSegmentFilesRequest request, ActionListener { - if (segrepHandler != null) { - logger.warn("Override handler for allocation id {}", request.getTargetAllocationId()); - cancelHandlers(handler -> handler.getAllocationId().equals(request.getTargetAllocationId()), "cancel due to retry"); - } - return createTargetHandler(request.getTargetNode(), copyState, request.getTargetAllocationId(), fileChunkWriter); - }); + final SegmentReplicationSourceHandler newHandler = createTargetHandler( + request.getTargetNode(), + copyState, + request.getTargetAllocationId(), + fileChunkWriter + ); + final SegmentReplicationSourceHandler existingHandler = allocationIdToHandlers.putIfAbsent( + request.getTargetAllocationId(), + newHandler + ); + // If we are already replicating to this allocation Id, cancel the old and replace with a new execution. + // This will clear the old handler & referenced copy state holding an incref'd indexCommit. + if (existingHandler != null) { + logger.warn("Override handler for allocation id {}", request.getTargetAllocationId()); + cancelHandlers(handler -> handler.getAllocationId().equals(request.getTargetAllocationId()), "cancel due to retry"); + assert allocationIdToHandlers.containsKey(request.getTargetAllocationId()) == false; + allocationIdToHandlers.put(request.getTargetAllocationId(), newHandler); + } + assert allocationIdToHandlers.containsKey(request.getTargetAllocationId()); return copyState; } diff --git a/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java b/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java index 3b289114f5ca1..84a53ae22a6bc 100644 --- a/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/OngoingSegmentReplicationsTests.java @@ -403,4 +403,38 @@ public void testCancelForMissingIds() throws IOException { assertEquals(0, replications.cachedCopyStateSize()); closeShards(replica_2); } + + public void testPrepareForReplicationAlreadyReplicating() throws IOException { + OngoingSegmentReplications replications = new OngoingSegmentReplications(mockIndicesService, recoverySettings); + final String replicaAllocationId = replica.routingEntry().allocationId().getId(); + final CheckpointInfoRequest request = new CheckpointInfoRequest(1L, replicaAllocationId, primaryDiscoveryNode, testCheckpoint); + + final CopyState copyState = replications.prepareForReplication(request, mock(FileChunkWriter.class)); + + final SegmentReplicationSourceHandler handler = replications.getHandlers().get(replicaAllocationId); + assertEquals(handler.getCopyState(), copyState); + assertEquals(1, copyState.refCount()); + + ReplicationCheckpoint secondCheckpoint = new ReplicationCheckpoint( + 
testCheckpoint.getShardId(), + testCheckpoint.getPrimaryTerm(), + testCheckpoint.getSegmentsGen(), + testCheckpoint.getSegmentInfosVersion() + 1, + testCheckpoint.getCodec() + ); + + final CheckpointInfoRequest secondRequest = new CheckpointInfoRequest( + 1L, + replicaAllocationId, + primaryDiscoveryNode, + secondCheckpoint + ); + + final CopyState secondCopyState = replications.prepareForReplication(secondRequest, mock(FileChunkWriter.class)); + final SegmentReplicationSourceHandler secondHandler = replications.getHandlers().get(replicaAllocationId); + assertEquals(secondHandler.getCopyState(), secondCopyState); + assertEquals("New copy state is incref'd", 1, secondCopyState.refCount()); + assertEquals("Old copy state is cleaned up", 0, copyState.refCount()); + + } } From 5495c641f894bc79aec5bbe4767b3fc7710ecbe0 Mon Sep 17 00:00:00 2001 From: Andrew Ross Date: Thu, 27 Jul 2023 17:56:38 -0500 Subject: [PATCH 10/75] Configure test-retry plugin filter properly (#8933) The intent of #8825 was to retry only specified tests. The wrong parameter was configured though: ['filter' should be set][1], not 'classRetry'. [1]: https://github.com/gradle/test-retry-gradle-plugin/blob/main/README.adoc#filtering Signed-off-by: Andrew Ross --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index c7b6987b1103f..12499bbf6a817 100644 --- a/build.gradle +++ b/build.gradle @@ -470,7 +470,7 @@ subprojects { maxFailures = 10 } failOnPassedAfterRetry = false - classRetry { + filter { includeClasses.add("org.opensearch.action.admin.cluster.node.tasks.ResourceAwareTasksTests") includeClasses.add("org.opensearch.action.admin.cluster.tasks.PendingTasksBlocksIT") includeClasses.add("org.opensearch.action.admin.indices.create.CreateIndexIT") From 3c973bafe709565380eb4224242e860499fb4acc Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Thu, 27 Jul 2023 18:56:42 -0500 Subject: [PATCH 11/75] [Refactor] MediaTypeParserRegistry to MediaTypeRegistry (#8940) This commit rote refactors MediaTypeParserRegistry to MediaTypeRegistry to make the class naming align with the intention of the logic. The MediaTypeRegistry is a mechanism for downstream extensions to register concrete MediaTypes thus having Parser in the name is unneeded. 
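For illustration only, and not part of this patch: a caller would resolve a content type through the renamed registry roughly as in the sketch below. It relies only on the fromMediaType and getDefaultMediaType methods visible in the diff that follows; the literal "application/json" value and the assumption that an unrecognized value yields null are illustrative, not taken from this change.

    // Resolve a Content-Type value to a registered MediaType (e.g. XContentType.JSON).
    MediaType requested = MediaTypeRegistry.fromMediaType("application/json");
    // Assumption: an unrecognized value yields null, so fall back to the registry default
    // (JSON, as configured during TransportService initialization in this patch).
    MediaType effective = requested != null ? requested : MediaTypeRegistry.getDefaultMediaType();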
Signed-off-by: Nicholas Walter Knize --- .../client/indices/CreateIndexRequest.java | 4 +-- .../client/indices/PutMappingRequest.java | 4 +-- .../core/common/io/stream/StreamInput.java | 4 +-- .../opensearch/core/xcontent/MediaType.java | 4 +-- ...erRegistry.java => MediaTypeRegistry.java} | 2 +- .../common/xcontent/XContentType.java | 4 +-- .../common/xcontent/MediaTypeParserTests.java | 27 +++++++++---------- .../opensearch/rest/AbstractRestChannel.java | 4 +-- .../transport/TransportService.java | 4 +-- 9 files changed, 27 insertions(+), 30 deletions(-) rename libs/core/src/main/java/org/opensearch/core/xcontent/{MediaTypeParserRegistry.java => MediaTypeRegistry.java} (99%) diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java index 3405e7e81e122..16915b32c16fe 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java @@ -47,7 +47,7 @@ import org.opensearch.core.ParseField; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; -import org.opensearch.core.xcontent.MediaTypeParserRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; @@ -187,7 +187,7 @@ public CreateIndexRequest mapping(XContentBuilder source) { */ public CreateIndexRequest mapping(Map source) { try { - XContentBuilder builder = XContentFactory.contentBuilder(MediaTypeParserRegistry.getDefaultMediaType()); + XContentBuilder builder = XContentFactory.contentBuilder(MediaTypeRegistry.getDefaultMediaType()); builder.map(source); return mapping(BytesReference.bytes(builder), builder.contentType()); } catch (IOException e) { diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java index d17dc54713789..721d6094f7502 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java @@ -40,7 +40,7 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaType; -import org.opensearch.core.xcontent.MediaTypeParserRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; @@ -111,7 +111,7 @@ public MediaType mediaType() { */ public PutMappingRequest source(Map mappingSource) { try { - XContentBuilder builder = XContentFactory.contentBuilder(MediaTypeParserRegistry.getDefaultMediaType()); + XContentBuilder builder = XContentFactory.contentBuilder(MediaTypeRegistry.getDefaultMediaType()); builder.map(mappingSource); return source(builder); } catch (IOException e) { diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java index 1d7321bf2c6de..d9040da569345 100644 --- 
a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java +++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java @@ -54,7 +54,7 @@ import org.opensearch.core.common.Strings; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.core.xcontent.MediaType; -import org.opensearch.core.xcontent.MediaTypeParserRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import java.io.ByteArrayInputStream; import java.io.EOFException; @@ -347,7 +347,7 @@ public BigInteger readBigInteger() throws IOException { } public MediaType readMediaType() throws IOException { - return MediaTypeParserRegistry.fromMediaType(readString()); + return MediaTypeRegistry.fromMediaType(readString()); } @Nullable diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java index c1409e551e47d..7193cd3bd97bb 100644 --- a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java @@ -82,7 +82,7 @@ default String mediaType() { * This method will return {@code null} if no match is found */ static MediaType fromFormat(String mediaType) { - return MediaTypeParserRegistry.fromFormat(mediaType); + return MediaTypeRegistry.fromFormat(mediaType); } /** @@ -93,7 +93,7 @@ static MediaType fromFormat(String mediaType) { */ static MediaType fromMediaType(String mediaTypeHeaderValue) { mediaTypeHeaderValue = removeVersionInMediaType(mediaTypeHeaderValue); - return MediaTypeParserRegistry.fromMediaType(mediaTypeHeaderValue); + return MediaTypeRegistry.fromMediaType(mediaTypeHeaderValue); } /** diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeParserRegistry.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java similarity index 99% rename from libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeParserRegistry.java rename to libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java index 62a26b4458b09..8ac92504a12d8 100644 --- a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeParserRegistry.java +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java @@ -41,7 +41,7 @@ * * @opensearch.internal */ -public final class MediaTypeParserRegistry { +public final class MediaTypeRegistry { private static Map formatToMediaType = Map.of(); private static Map typeWithSubtypeToMediaType = Map.of(); diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java index 023caa49e1f39..9291981f32113 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java @@ -38,7 +38,7 @@ import org.opensearch.common.xcontent.yaml.YamlXContent; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.MediaType; -import org.opensearch.core.xcontent.MediaTypeParserRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContent; import java.io.IOException; @@ -133,7 +133,7 @@ public XContent xContent() { static { /** a parser of media types */ - MediaTypeParserRegistry.register(XContentType.values(), Map.of("application/*", JSON, "application/x-ndjson", JSON)); + 
MediaTypeRegistry.register(XContentType.values(), Map.of("application/*", JSON, "application/x-ndjson", JSON)); } private int index; diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java index 15492b7351984..64d36f0a8b78f 100644 --- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java +++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/MediaTypeParserTests.java @@ -32,7 +32,7 @@ package org.opensearch.common.xcontent; -import org.opensearch.core.xcontent.MediaTypeParserRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.test.OpenSearchTestCase; import java.util.Collections; @@ -46,40 +46,37 @@ public class MediaTypeParserTests extends OpenSearchTestCase { public void testJsonWithParameters() throws Exception { String mediaType = "application/json"; - assertThat(MediaTypeParserRegistry.parseMediaType(mediaType).getParameters(), equalTo(Collections.emptyMap())); - assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + ";").getParameters(), equalTo(Collections.emptyMap())); + assertThat(MediaTypeRegistry.parseMediaType(mediaType).getParameters(), equalTo(Collections.emptyMap())); + assertThat(MediaTypeRegistry.parseMediaType(mediaType + ";").getParameters(), equalTo(Collections.emptyMap())); + assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; charset=UTF-8").getParameters(), equalTo(Map.of("charset", "utf-8"))); assertThat( - MediaTypeParserRegistry.parseMediaType(mediaType + "; charset=UTF-8").getParameters(), - equalTo(Map.of("charset", "utf-8")) - ); - assertThat( - MediaTypeParserRegistry.parseMediaType(mediaType + "; custom=123;charset=UTF-8").getParameters(), + MediaTypeRegistry.parseMediaType(mediaType + "; custom=123;charset=UTF-8").getParameters(), equalTo(Map.of("charset", "utf-8", "custom", "123")) ); } public void testWhiteSpaceInTypeSubtype() { String mediaType = " application/json "; - assertThat(MediaTypeParserRegistry.parseMediaType(mediaType).getMediaType(), equalTo(XContentType.JSON)); + assertThat(MediaTypeRegistry.parseMediaType(mediaType).getMediaType(), equalTo(XContentType.JSON)); assertThat( - MediaTypeParserRegistry.parseMediaType(mediaType + "; custom=123; charset=UTF-8").getParameters(), + MediaTypeRegistry.parseMediaType(mediaType + "; custom=123; charset=UTF-8").getParameters(), equalTo(Map.of("charset", "utf-8", "custom", "123")) ); assertThat( - MediaTypeParserRegistry.parseMediaType(mediaType + "; custom=123;\n charset=UTF-8").getParameters(), + MediaTypeRegistry.parseMediaType(mediaType + "; custom=123;\n charset=UTF-8").getParameters(), equalTo(Map.of("charset", "utf-8", "custom", "123")) ); mediaType = " application / json "; - assertThat(MediaTypeParserRegistry.parseMediaType(mediaType), is(nullValue())); + assertThat(MediaTypeRegistry.parseMediaType(mediaType), is(nullValue())); } public void testInvalidParameters() { String mediaType = "application/json"; - assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + "; keyvalueNoEqualsSign"), is(nullValue())); + assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; keyvalueNoEqualsSign"), is(nullValue())); - assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + "; key = value"), is(nullValue())); - assertThat(MediaTypeParserRegistry.parseMediaType(mediaType + "; key="), is(nullValue())); + assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; key = 
value"), is(nullValue())); + assertThat(MediaTypeRegistry.parseMediaType(mediaType + "; key="), is(nullValue())); } } diff --git a/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java b/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java index dcee6500325b9..32499b1fc155b 100644 --- a/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java +++ b/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java @@ -36,7 +36,7 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.MediaType; -import org.opensearch.core.xcontent.MediaTypeParserRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -132,7 +132,7 @@ public XContentBuilder newBuilder(@Nullable MediaType requestContentType, @Nulla responseContentType = requestContentType; } else { // default to JSON output when all else fails - responseContentType = MediaTypeParserRegistry.getDefaultMediaType(); + responseContentType = MediaTypeRegistry.getDefaultMediaType(); } } diff --git a/server/src/main/java/org/opensearch/transport/TransportService.java b/server/src/main/java/org/opensearch/transport/TransportService.java index b8d7d130e846b..c3e287b458fc5 100644 --- a/server/src/main/java/org/opensearch/transport/TransportService.java +++ b/server/src/main/java/org/opensearch/transport/TransportService.java @@ -62,7 +62,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; -import org.opensearch.core.xcontent.MediaTypeParserRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.node.NodeClosedException; import org.opensearch.node.ReportingService; import org.opensearch.tasks.Task; @@ -174,7 +174,7 @@ public void close() {} /** Registers OpenSearch server specific exceptions (exceptions outside of core library) */ OpenSearchServerException.registerExceptions(); // set the default media type to JSON (fallback if a media type is not specified) - MediaTypeParserRegistry.setDefaultMediaType(XContentType.JSON); + MediaTypeRegistry.setDefaultMediaType(XContentType.JSON); } /** does nothing. 
easy way to ensure class is loaded so the above static block is called to register the streamables */ From 8edc1ddd030e6b867db6f3029cca32a684154632 Mon Sep 17 00:00:00 2001 From: Suraj Singh Date: Thu, 27 Jul 2023 19:00:31 -0700 Subject: [PATCH 12/75] [Segment Replication] Use deterministic mechanism to have concurrent invocation of segment replication (#8937) * [Segment Replication] Use deterministic mechanism to have concurrent invocation of segment replication Signed-off-by: Suraj Singh * Clean up Signed-off-by: Suraj Singh --------- Signed-off-by: Suraj Singh --- .../SegmentReplicationTargetServiceTests.java | 57 +++++++++++++++---- 1 file changed, 46 insertions(+), 11 deletions(-) diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java index 4643615d45d7e..94e57f4a0d3e4 100644 --- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java @@ -32,6 +32,7 @@ import org.opensearch.indices.recovery.ForceSyncRequest; import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.indices.replication.common.CopyState; import org.opensearch.indices.replication.common.ReplicationCollection; import org.opensearch.indices.replication.common.ReplicationFailedException; import org.opensearch.indices.replication.common.ReplicationLuceneIndex; @@ -49,6 +50,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.junit.Assert.assertEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.atLeastOnce; @@ -70,10 +72,7 @@ public class SegmentReplicationTargetServiceTests extends IndexShardTestCase { private IndexShard replicaShard; private IndexShard primaryShard; private ReplicationCheckpoint checkpoint; - private SegmentReplicationSource replicationSource; private SegmentReplicationTargetService sut; - - private ReplicationCheckpoint initialCheckpoint; private ReplicationCheckpoint aheadCheckpoint; private ReplicationCheckpoint newPrimaryCheckpoint; @@ -83,11 +82,10 @@ public class SegmentReplicationTargetServiceTests extends IndexShardTestCase { private DiscoveryNode localNode; private IndicesService indicesService; - private ClusterService clusterService; private SegmentReplicationState state; - private static long TRANSPORT_TIMEOUT = 30000;// 30sec + private static final long TRANSPORT_TIMEOUT = 30000;// 30sec @Override public void setUp() throws Exception { @@ -107,9 +105,6 @@ public void setUp() throws Exception { 0L, replicaShard.getLatestReplicationCheckpoint().getCodec() ); - SegmentReplicationSourceFactory replicationSourceFactory = mock(SegmentReplicationSourceFactory.class); - replicationSource = mock(SegmentReplicationSource.class); - when(replicationSourceFactory.get(replicaShard)).thenReturn(replicationSource); testThreadPool = new TestThreadPool("test", Settings.EMPTY); localNode = new DiscoveryNode("local", buildNewFakeTransportAddress(), Version.CURRENT); @@ -126,7 +121,7 @@ public void setUp() throws Exception { transportService.acceptIncomingRequests(); indicesService = mock(IndicesService.class); - clusterService = mock(ClusterService.class); + ClusterService 
clusterService = mock(ClusterService.class); ClusterState clusterState = mock(ClusterState.class); RoutingTable mockRoutingTable = mock(RoutingTable.class); when(clusterService.state()).thenReturn(clusterState); @@ -135,7 +130,7 @@ public void setUp() throws Exception { when(clusterState.nodes()).thenReturn(DiscoveryNodes.builder().add(localNode).build()); sut = prepareForReplication(primaryShard, replicaShard, transportService, indicesService, clusterService); - initialCheckpoint = replicaShard.getLatestReplicationCheckpoint(); + ReplicationCheckpoint initialCheckpoint = replicaShard.getLatestReplicationCheckpoint(); aheadCheckpoint = new ReplicationCheckpoint( initialCheckpoint.getShardId(), initialCheckpoint.getPrimaryTerm(), @@ -242,7 +237,46 @@ public void testAlreadyOnNewCheckpoint() { } public void testShardAlreadyReplicating() { - sut.startReplication(replicaShard, mock(SegmentReplicationTargetService.SegmentReplicationListener.class)); + CountDownLatch blockGetCheckpointMetadata = new CountDownLatch(1); + SegmentReplicationSource source = new TestReplicationSource() { + @Override + public void getCheckpointMetadata( + long replicationId, + ReplicationCheckpoint checkpoint, + ActionListener listener + ) { + try { + blockGetCheckpointMetadata.await(); + final CopyState copyState = new CopyState( + ReplicationCheckpoint.empty(primaryShard.shardId(), primaryShard.getLatestReplicationCheckpoint().getCodec()), + primaryShard + ); + listener.onResponse( + new CheckpointInfoResponse(copyState.getCheckpoint(), copyState.getMetadataMap(), copyState.getInfosBytes()) + ); + } catch (InterruptedException | IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public void getSegmentFiles( + long replicationId, + ReplicationCheckpoint checkpoint, + List filesToFetch, + IndexShard indexShard, + ActionListener listener + ) { + listener.onResponse(new GetSegmentFilesResponse(Collections.emptyList())); + } + }; + final SegmentReplicationTarget target = spy( + new SegmentReplicationTarget(replicaShard, source, mock(SegmentReplicationTargetService.SegmentReplicationListener.class)) + ); + // Start first round of segment replication. + sut.startReplication(target); + + // Start second round of segment replication, this should fail to start as first round is still in-progress sut.startReplication(replicaShard, new SegmentReplicationTargetService.SegmentReplicationListener() { @Override public void onReplicationDone(SegmentReplicationState state) { @@ -255,6 +289,7 @@ public void onReplicationFailure(SegmentReplicationState state, ReplicationFaile assertFalse(sendShardFailure); } }); + blockGetCheckpointMetadata.countDown(); } public void testOnNewCheckpointFromNewPrimaryCancelOngoingReplication() throws InterruptedException { From 3952d5e0be809d1fa42d37618edf5f31448e7053 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Thu, 27 Jul 2023 21:15:01 -0500 Subject: [PATCH 13/75] Register MediaTypes through SPI (#8938) * Register MediaTypes through SPI This commit provides a new SPI interface MediaContentProvider. Modules, Plugins, Extensions, implement this interface and provide the concrete MediaType implementations (and MIME aliases) through getMediaTypes and getAdditionalMediaTypes, respectively. 
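As an illustrative sketch only (the plugin package, class name, and the extra MIME alias below are hypothetical; XContentType.JSON already exists, while MediaTypeProvider and the META-INF/services wiring are the ones added by this patch), a downstream plugin could hook into the SPI roughly like this:

    package org.example.myplugin.spi; // hypothetical plugin package

    import org.opensearch.common.xcontent.XContentType;
    import org.opensearch.core.xcontent.MediaType;
    import org.opensearch.core.xcontent.spi.MediaTypeProvider;

    import java.util.List;
    import java.util.Map;

    /**
     * Hypothetical provider: contributes no new concrete MediaType implementation,
     * only an extra MIME alias that resolves to the bundled JSON type.
     */
    public class MyMediaTypeProvider implements MediaTypeProvider {
        @Override
        public List<MediaType> getMediaTypes() {
            return List.of(); // no new concrete types in this sketch
        }

        @Override
        public Map<String, MediaType> getAdditionalMediaTypes() {
            // map a hypothetical vendor MIME type onto the existing JSON implementation
            return Map.of("application/vnd.example+json", XContentType.JSON);
        }
    }

Such a provider would only be picked up by the ServiceLoader lookup in MediaTypeRegistry if the plugin also ships a META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider file listing org.example.myplugin.spi.MyMediaTypeProvider, mirroring the x-content registration added further down in this patch.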
This enables downstream extensions (e.g., serverless or cloud native implementations) to register their own custom MediaType and define the serialization format that is registered when the classloader loads the MediaTypeRegistry instead of having to register the types explicitly in application code. Signed-off-by: Nicholas Walter Knize * pass the MediaTypeProvider classloader to the SPI ServiceLoader Signed-off-by: Nicholas Walter Knize --------- Signed-off-by: Nicholas Walter Knize --- .../core/xcontent/MediaTypeRegistry.java | 29 +++++++++++++-- .../core/xcontent/spi/MediaTypeProvider.java | 29 +++++++++++++++ .../core/xcontent/spi/package-info.java | 10 ++++++ .../common/xcontent/XContentType.java | 7 ---- .../common/xcontent/spi/XContentProvider.java | 35 +++++++++++++++++++ .../common/xcontent/spi/package-info.java | 10 ++++++ ...search.core.xcontent.spi.MediaTypeProvider | 9 +++++ .../transport/TransportService.java | 4 --- 8 files changed, 120 insertions(+), 13 deletions(-) create mode 100644 libs/core/src/main/java/org/opensearch/core/xcontent/spi/MediaTypeProvider.java create mode 100644 libs/core/src/main/java/org/opensearch/core/xcontent/spi/package-info.java create mode 100644 libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/XContentProvider.java create mode 100644 libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/package-info.java create mode 100644 libs/x-content/src/main/resources/META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java index 8ac92504a12d8..b81325f6c7c74 100644 --- a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java @@ -32,9 +32,16 @@ package org.opensearch.core.xcontent; +import org.opensearch.core.xcontent.spi.MediaTypeProvider; + +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.ServiceLoader; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Parses supported internet media types @@ -48,7 +55,25 @@ public final class MediaTypeRegistry { // Default mediaType singleton private static MediaType DEFAULT_MEDIA_TYPE; - public static void register(MediaType[] acceptedMediaTypes, Map additionalMediaTypes) { + // JSON is a core type, so we create a static instance for implementations that require JSON format (e.g., tests) + // todo we should explore moving the concrete JSON implementation from the xcontent library to core + public static final MediaType JSON; + + static { + List mediaTypes = new ArrayList<>(); + Map amt = new HashMap<>(); + for (MediaTypeProvider provider : ServiceLoader.load(MediaTypeProvider.class, MediaTypeProvider.class.getClassLoader())) { + mediaTypes.addAll(provider.getMediaTypes()); + amt = Stream.of(amt, provider.getAdditionalMediaTypes()) + .flatMap(map -> map.entrySet().stream()) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + } + register(mediaTypes.toArray(new MediaType[0]), amt); + JSON = fromMediaType("application/json"); + setDefaultMediaType(JSON); + } + + private static void register(MediaType[] acceptedMediaTypes, Map additionalMediaTypes) { // ensures the map is not overwritten: Map typeMap = new HashMap<>(typeWithSubtypeToMediaType); Map formatMap = new 
HashMap<>(formatToMediaType); @@ -150,7 +175,7 @@ public Map getParameters() { } } - public static void setDefaultMediaType(final MediaType mediaType) { + private static void setDefaultMediaType(final MediaType mediaType) { if (DEFAULT_MEDIA_TYPE != null) { throw new RuntimeException( "unable to reset the default media type from current default [" + DEFAULT_MEDIA_TYPE + "] to [" + mediaType + "]" diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/spi/MediaTypeProvider.java b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/MediaTypeProvider.java new file mode 100644 index 0000000000000..eeaadc1698df6 --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/MediaTypeProvider.java @@ -0,0 +1,29 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.core.xcontent.spi; + +import org.opensearch.core.xcontent.MediaType; + +import java.util.List; +import java.util.Map; + +/** + * Service Provider Interface for plugins, modules, extensions providing + * their own Media Types + * + * @opensearch.experimental + * @opensearch.api + */ +public interface MediaTypeProvider { + /** Extensions that implement their own concrete {@link MediaType}s provide them through this interface method */ + List getMediaTypes(); + + /** Extensions that implement additional {@link MediaType} aliases provide them through this interface method */ + Map getAdditionalMediaTypes(); +} diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/spi/package-info.java b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/package-info.java new file mode 100644 index 0000000000000..67ccd981dafa8 --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/spi/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Service Provider Interface for extensible media types */ +package org.opensearch.core.xcontent.spi; diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java index 9291981f32113..d418e25ba8292 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java @@ -38,11 +38,9 @@ import org.opensearch.common.xcontent.yaml.YamlXContent; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.MediaType; -import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContent; import java.io.IOException; -import java.util.Map; /** * The content type of {@link XContent}. 
@@ -131,11 +129,6 @@ public XContent xContent() { } }; - static { - /** a parser of media types */ - MediaTypeRegistry.register(XContentType.values(), Map.of("application/*", JSON, "application/x-ndjson", JSON)); - } - private int index; XContentType(int index) { diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/XContentProvider.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/XContentProvider.java new file mode 100644 index 0000000000000..af5ab67507b81 --- /dev/null +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/XContentProvider.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.xcontent.spi; + +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.spi.MediaTypeProvider; + +import java.util.List; +import java.util.Map; + +/** + * Media Type implementations provided by xcontent library + * + * @opensearch.internal + */ +public class XContentProvider implements MediaTypeProvider { + /** Returns the concrete {@link MediaType} provided by the xcontent library */ + @Override + public List getMediaTypes() { + return List.of(XContentType.values()); + } + + /** Returns the additional {@link MediaType} aliases provided by the xcontent library */ + @Override + public Map getAdditionalMediaTypes() { + return Map.of("application/*", XContentType.JSON, "application/x-ndjson", XContentType.JSON); + } +} diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/package-info.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/package-info.java new file mode 100644 index 0000000000000..c265021f12763 --- /dev/null +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/spi/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** SPI implementation for the xcontent library */ +package org.opensearch.common.xcontent.spi; diff --git a/libs/x-content/src/main/resources/META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider b/libs/x-content/src/main/resources/META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider new file mode 100644 index 0000000000000..ce3fab93087dd --- /dev/null +++ b/libs/x-content/src/main/resources/META-INF/services/org.opensearch.core.xcontent.spi.MediaTypeProvider @@ -0,0 +1,9 @@ +# +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
+# + +org.opensearch.common.xcontent.spi.XContentProvider diff --git a/server/src/main/java/org/opensearch/transport/TransportService.java b/server/src/main/java/org/opensearch/transport/TransportService.java index c3e287b458fc5..25293bf97b222 100644 --- a/server/src/main/java/org/opensearch/transport/TransportService.java +++ b/server/src/main/java/org/opensearch/transport/TransportService.java @@ -59,10 +59,8 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.lease.Releasable; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; -import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.node.NodeClosedException; import org.opensearch.node.ReportingService; import org.opensearch.tasks.Task; @@ -173,8 +171,6 @@ public void close() {} Streamables.registerStreamables(); /** Registers OpenSearch server specific exceptions (exceptions outside of core library) */ OpenSearchServerException.registerExceptions(); - // set the default media type to JSON (fallback if a media type is not specified) - MediaTypeRegistry.setDefaultMediaType(XContentType.JSON); } /** does nothing. easy way to ensure class is loaded so the above static block is called to register the streamables */ From 723cab632af71e197acee25290def8d6d4b46343 Mon Sep 17 00:00:00 2001 From: Suraj Singh Date: Thu, 27 Jul 2023 22:39:58 -0700 Subject: [PATCH 14/75] Remove testUpdateSegmentsWhileRefreshing flaky test failure (#8943) Signed-off-by: Suraj Singh --- .../NRTReplicationReaderManagerTests.java | 39 ------------------- 1 file changed, 39 deletions(-) diff --git a/server/src/test/java/org/opensearch/index/engine/NRTReplicationReaderManagerTests.java b/server/src/test/java/org/opensearch/index/engine/NRTReplicationReaderManagerTests.java index 98f1a416731e4..d635b38e811c4 100644 --- a/server/src/test/java/org/opensearch/index/engine/NRTReplicationReaderManagerTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NRTReplicationReaderManagerTests.java @@ -46,43 +46,4 @@ public void testCreateNRTreaderManager() throws IOException { } } } - - public void testUpdateSegmentsWhileRefreshing() throws IOException, InterruptedException { - try (final Store store = createStore()) { - store.createEmpty(Version.LATEST); - final DirectoryReader reader = DirectoryReader.open(store.directory()); - NRTReplicationReaderManager readerManager = new NRTReplicationReaderManager( - OpenSearchDirectoryReader.wrap(reader, shardId), - (files) -> {}, - (files) -> {} - ); - - final SegmentInfos infos_2 = readerManager.getSegmentInfos().clone(); - infos_2.changed(); - - Thread refreshThread = new Thread(() -> { - try { - readerManager.maybeRefresh(); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - Thread updateThread = new Thread(() -> { - try { - readerManager.updateSegments(infos_2); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - refreshThread.start(); - updateThread.start(); - refreshThread.join(); - updateThread.join(); - try (final OpenSearchDirectoryReader acquire = readerManager.acquire()) { - final StandardDirectoryReader standardReader = NRTReplicationReaderManager.unwrapStandardReader(acquire); - assertEquals(infos_2.version, standardReader.getSegmentInfos().version); - } - assertEquals(infos_2, readerManager.getSegmentInfos()); - } - } } From 
c43743dd8ac132314dfe375f712c37a7a0f7d8df Mon Sep 17 00:00:00 2001 From: Sarthak Aggarwal Date: Fri, 28 Jul 2023 12:41:46 +0530 Subject: [PATCH 15/75] amending tests to include all codecs (#8907) Signed-off-by: Sarthak Aggarwal --- .../test/java/org/opensearch/upgrades/IndexingIT.java | 4 ++-- .../indices/replication/SegmentReplicationIT.java | 6 +----- .../org/opensearch/test/OpenSearchIntegTestCase.java | 10 +++++++++- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index b60ee09d39048..d196ec3dcd0a8 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -40,7 +40,6 @@ import org.opensearch.common.Booleans; import org.opensearch.common.io.Streams; import org.opensearch.common.settings.Settings; -import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.EngineConfig; import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.rest.yaml.ObjectPath; @@ -54,6 +53,7 @@ import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; import static org.opensearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; +import static org.opensearch.test.OpenSearchIntegTestCase.CODECS; /** * Basic test that indexed documents survive the rolling restart. See @@ -267,7 +267,7 @@ public void testIndexingWithSegRep() throws Exception { .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put( EngineConfig.INDEX_CODEC_SETTING.getKey(), - randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC) + randomFrom(CODECS) ) .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"); createIndex(indexName, settings.build()); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java index 3ab1a2a8564c5..405d0b595e02b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java @@ -53,7 +53,6 @@ import org.opensearch.index.SegmentReplicationPerGroupStats; import org.opensearch.index.SegmentReplicationPressureService; import org.opensearch.index.SegmentReplicationShardStats; -import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.engine.NRTReplicationReaderManager; @@ -201,10 +200,7 @@ public void testReplicationAfterPrimaryRefreshAndFlush() throws Exception { final String nodeB = internalCluster().startDataOnlyNode(); final Settings settings = Settings.builder() .put(indexSettings()) - .put( - EngineConfig.INDEX_CODEC_SETTING.getKey(), - randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC) - ) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), randomFrom(CODECS)) .build(); createIndex(INDEX_NAME, settings); ensureGreen(INDEX_NAME); diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 
3564bd667ee2b..6e3364b208180 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -364,6 +364,14 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase { private static OpenSearchIntegTestCase INSTANCE = null; // see @SuiteScope private static Long SUITE_SEED = null; + public static List CODECS = List.of( + CodecService.DEFAULT_CODEC, + CodecService.BEST_COMPRESSION_CODEC, + CodecService.LUCENE_DEFAULT_CODEC, + CodecService.ZSTD_CODEC, + CodecService.ZSTD_NO_DICT_CODEC + ); + @BeforeClass public static void beforeClass() throws Exception { SUITE_SEED = randomLong(); @@ -427,7 +435,7 @@ protected void randomIndexTemplate() { // otherwise, use it, it has assertions and so on that can find bugs. SuppressCodecs annotation = getClass().getAnnotation(SuppressCodecs.class); if (annotation != null && annotation.value().length == 1 && "*".equals(annotation.value()[0])) { - randomSettingsBuilder.put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)); + randomSettingsBuilder.put("index.codec", randomFrom(CODECS)); } else { randomSettingsBuilder.put("index.codec", CodecService.LUCENE_DEFAULT_CODEC); } From 96630f036f80708126ef4a2459577dab629220ed Mon Sep 17 00:00:00 2001 From: Gaurav Bafna <85113518+gbbafna@users.noreply.github.com> Date: Fri, 28 Jul 2023 12:53:09 +0530 Subject: [PATCH 16/75] Add metadata prefix to Remote Translog Metadata file (#8914) Signed-off-by: Gaurav Bafna --- .../transfer/BlobStoreTransferService.java | 7 ++-- .../translog/transfer/TransferService.java | 12 +++++-- .../transfer/TranslogTransferManager.java | 8 ++++- .../transfer/TranslogTransferMetadata.java | 3 ++ .../TranslogTransferManagerTests.java | 32 +++++++++++++------ .../blobstore/BlobStoreRepositoryTests.java | 2 -- 6 files changed, 45 insertions(+), 19 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/translog/transfer/BlobStoreTransferService.java b/server/src/main/java/org/opensearch/index/translog/transfer/BlobStoreTransferService.java index 974e8af42b939..95424d86fba34 100644 --- a/server/src/main/java/org/opensearch/index/translog/transfer/BlobStoreTransferService.java +++ b/server/src/main/java/org/opensearch/index/translog/transfer/BlobStoreTransferService.java @@ -213,17 +213,18 @@ public void listFoldersAsync(String threadpoolName, Iterable path, Actio }); } - public void listAllInSortedOrder(Iterable path, int limit, ActionListener> listener) { - blobStore.blobContainer((BlobPath) path).listBlobsByPrefixInSortedOrder("", limit, LEXICOGRAPHIC, listener); + public void listAllInSortedOrder(Iterable path, String filenamePrefix, int limit, ActionListener> listener) { + blobStore.blobContainer((BlobPath) path).listBlobsByPrefixInSortedOrder(filenamePrefix, limit, LEXICOGRAPHIC, listener); } public void listAllInSortedOrderAsync( String threadpoolName, Iterable path, + String filenamePrefix, int limit, ActionListener> listener ) { - threadPool.executor(threadpoolName).execute(() -> { listAllInSortedOrder(path, limit, listener); }); + threadPool.executor(threadpoolName).execute(() -> { listAllInSortedOrder(path, filenamePrefix, limit, listener); }); } } diff --git a/server/src/main/java/org/opensearch/index/translog/transfer/TransferService.java b/server/src/main/java/org/opensearch/index/translog/transfer/TransferService.java index a240fd38cda11..885a2e32e915d 100644 --- 
a/server/src/main/java/org/opensearch/index/translog/transfer/TransferService.java +++ b/server/src/main/java/org/opensearch/index/translog/transfer/TransferService.java @@ -125,8 +125,14 @@ void uploadBlobs( */ InputStream downloadBlob(Iterable path, String fileName) throws IOException; - void listAllInSortedOrder(Iterable path, int limit, ActionListener> listener); - - void listAllInSortedOrderAsync(String threadpoolName, Iterable path, int limit, ActionListener> listener); + void listAllInSortedOrder(Iterable path, String filenamePrefix, int limit, ActionListener> listener); + + void listAllInSortedOrderAsync( + String threadpoolName, + Iterable path, + String filenamePrefix, + int limit, + ActionListener> listener + ); } diff --git a/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferManager.java b/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferManager.java index e2bb5f74df234..850a0c1a6574c 100644 --- a/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferManager.java +++ b/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferManager.java @@ -210,7 +210,12 @@ public TranslogTransferMetadata readMetadata() throws IOException { ); try { - transferService.listAllInSortedOrder(remoteMetadataTransferPath, 1, latchedActionListener); + transferService.listAllInSortedOrder( + remoteMetadataTransferPath, + TranslogTransferMetadata.METADATA_PREFIX, + 1, + latchedActionListener + ); latch.await(); } catch (InterruptedException e) { throw new IOException("Exception while reading/downloading metadafile", e); @@ -367,6 +372,7 @@ public void deleteStaleTranslogMetadataFilesAsync(Runnable onCompletion) { transferService.listAllInSortedOrderAsync( ThreadPool.Names.REMOTE_PURGE, remoteMetadataTransferPath, + TranslogTransferMetadata.METADATA_PREFIX, Integer.MAX_VALUE, new ActionListener<>() { @Override diff --git a/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferMetadata.java b/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferMetadata.java index 75d6549b23f1e..a8b3404d3f2ce 100644 --- a/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferMetadata.java +++ b/server/src/main/java/org/opensearch/index/translog/transfer/TranslogTransferMetadata.java @@ -36,6 +36,8 @@ public class TranslogTransferMetadata { public static final String METADATA_SEPARATOR = "__"; + public static final String METADATA_PREFIX = "metadata"; + static final int BUFFER_SIZE = 4096; static final int CURRENT_VERSION = 1; @@ -83,6 +85,7 @@ public String getFileName() { return String.join( METADATA_SEPARATOR, Arrays.asList( + METADATA_PREFIX, RemoteStoreUtils.invertLong(primaryTerm), RemoteStoreUtils.invertLong(generation), RemoteStoreUtils.invertLong(createdAt), diff --git a/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java b/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java index b7091f3f4f8a6..3d622d6bdf8b8 100644 --- a/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java +++ b/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java @@ -44,6 +44,7 @@ import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.any; 
import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; @@ -205,11 +206,12 @@ public void testReadMetadataNoFile() throws IOException { null ); doAnswer(invocation -> { - LatchedActionListener> latchedActionListener = invocation.getArgument(2); + LatchedActionListener> latchedActionListener = invocation.getArgument(3); List bmList = new LinkedList<>(); latchedActionListener.onResponse(bmList); return null; - }).when(transferService).listAllInSortedOrder(any(BlobPath.class), anyInt(), any(ActionListener.class)); + }).when(transferService) + .listAllInSortedOrder(any(BlobPath.class), eq(TranslogTransferMetadata.METADATA_PREFIX), anyInt(), any(ActionListener.class)); assertNull(translogTransferManager.readMetadata()); } @@ -225,12 +227,13 @@ public void testReadMetadataSingleFile() throws IOException { TranslogTransferMetadata tm = new TranslogTransferMetadata(1, 1, 1, 2); String mdFilename = tm.getFileName(); doAnswer(invocation -> { - LatchedActionListener> latchedActionListener = invocation.getArgument(2); + LatchedActionListener> latchedActionListener = invocation.getArgument(3); List bmList = new LinkedList<>(); bmList.add(new PlainBlobMetadata(mdFilename, 1)); latchedActionListener.onResponse(bmList); return null; - }).when(transferService).listAllInSortedOrder(any(BlobPath.class), anyInt(), any(ActionListener.class)); + }).when(transferService) + .listAllInSortedOrder(any(BlobPath.class), eq(TranslogTransferMetadata.METADATA_PREFIX), anyInt(), any(ActionListener.class)); TranslogTransferMetadata metadata = createTransferSnapshot().getTranslogTransferMetadata(); when(transferService.downloadBlob(any(BlobPath.class), eq(mdFilename))).thenReturn( @@ -252,12 +255,13 @@ public void testReadMetadataReadException() throws IOException { String mdFilename = tm.getFileName(); doAnswer(invocation -> { - LatchedActionListener> latchedActionListener = invocation.getArgument(2); + LatchedActionListener> latchedActionListener = invocation.getArgument(3); List bmList = new LinkedList<>(); bmList.add(new PlainBlobMetadata(mdFilename, 1)); latchedActionListener.onResponse(bmList); return null; - }).when(transferService).listAllInSortedOrder(any(BlobPath.class), anyInt(), any(ActionListener.class)); + }).when(transferService) + .listAllInSortedOrder(any(BlobPath.class), eq(TranslogTransferMetadata.METADATA_PREFIX), anyInt(), any(ActionListener.class)); when(transferService.downloadBlob(any(BlobPath.class), eq(mdFilename))).thenThrow(new IOException("Something went wrong")); @@ -283,10 +287,11 @@ public void testReadMetadataListException() throws IOException { ); doAnswer(invocation -> { - LatchedActionListener> latchedActionListener = invocation.getArgument(2); + LatchedActionListener> latchedActionListener = invocation.getArgument(3); latchedActionListener.onFailure(new IOException("Issue while listing")); return null; - }).when(transferService).listAllInSortedOrder(any(BlobPath.class), anyInt(), any(ActionListener.class)); + }).when(transferService) + .listAllInSortedOrder(any(BlobPath.class), eq(TranslogTransferMetadata.METADATA_PREFIX), anyInt(), any(ActionListener.class)); when(transferService.downloadBlob(any(BlobPath.class), any(String.class))).thenThrow(new IOException("Something went wrong")); @@ -416,7 +421,7 @@ public void testDeleteStaleTranslogMetadata() { String tm2 = new TranslogTransferMetadata(1, 2, 1, 2).getFileName(); String tm3 = new TranslogTransferMetadata(2, 3, 1, 2).getFileName(); doAnswer(invocation -> { - ActionListener> actionListener = 
invocation.getArgument(3); + ActionListener> actionListener = invocation.getArgument(4); List bmList = new LinkedList<>(); bmList.add(new PlainBlobMetadata(tm1, 1)); bmList.add(new PlainBlobMetadata(tm2, 1)); @@ -424,12 +429,19 @@ public void testDeleteStaleTranslogMetadata() { actionListener.onResponse(bmList); return null; }).when(transferService) - .listAllInSortedOrderAsync(eq(ThreadPool.Names.REMOTE_PURGE), any(BlobPath.class), anyInt(), any(ActionListener.class)); + .listAllInSortedOrderAsync( + eq(ThreadPool.Names.REMOTE_PURGE), + any(BlobPath.class), + anyString(), + anyInt(), + any(ActionListener.class) + ); List files = List.of(tm2, tm3); translogTransferManager.deleteStaleTranslogMetadataFilesAsync(() -> { verify(transferService).listAllInSortedOrderAsync( eq(ThreadPool.Names.REMOTE_PURGE), any(BlobPath.class), + eq(TranslogTransferMetadata.METADATA_PREFIX), eq(Integer.MAX_VALUE), any() ); diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java index 28513f279f8ad..9d711d464754c 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -32,7 +32,6 @@ package org.opensearch.repositories.blobstore; -import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.Version; import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -91,7 +90,6 @@ /** * Tests for the {@link BlobStoreRepository} and its subclasses. */ -@LuceneTestCase.SuppressFileSystems("ExtrasFS") public class BlobStoreRepositoryTests extends OpenSearchSingleNodeTestCase { static final String REPO_TYPE = "fsLike"; From 106e83a75205e5ff5a3a760b28a53f71240e7e61 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Fri, 28 Jul 2023 10:27:36 -0400 Subject: [PATCH 17/75] [BUG] CompletionSuggestSearchIT.testSkipDuplicates is flaky (#8963) Signed-off-by: Andriy Redko --- .../search/internal/ContextIndexSearcher.java | 4 ++++ .../suggest/completion/CompletionSuggester.java | 11 +++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java index e3ca932eb4699..5384b47cc69ec 100644 --- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java @@ -343,6 +343,10 @@ private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collecto } } } + + // Note: this is called if collection ran successfully, including the above special cases of + // CollectionTerminatedException and TimeExceededException, but no other exception. 
+ leafCollector.finish(); } private Weight wrapWeight(Weight weight) { diff --git a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java index 175503cb94e3d..df18e874be9c9 100644 --- a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Weight; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.TopSuggestDocs; @@ -108,15 +109,21 @@ private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSu for (LeafReaderContext context : searcher.getIndexReader().leaves()) { BulkScorer scorer = weight.bulkScorer(context); if (scorer != null) { + LeafCollector leafCollector = null; try { - scorer.score(collector.getLeafCollector(context), context.reader().getLiveDocs()); + leafCollector = collector.getLeafCollector(context); + scorer.score(leafCollector, context.reader().getLiveDocs()); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf } + // Note: this is called if collection ran successfully, including the above special cases of + // CollectionTerminatedException and TimeExceededException, but no other exception. + if (leafCollector != null) { + leafCollector.finish(); + } } } - collector.finish(); } @Override From 0003bd86332b8a59a3581bc4f48bdb3bf69ecab0 Mon Sep 17 00:00:00 2001 From: Evan Kielley Date: Fri, 28 Jul 2023 07:36:56 -0700 Subject: [PATCH 18/75] Added empty build.gradle files for arm64 and s390x because the gradle assemble task fails without them. (#8904) Signed-off-by: Evan Kielley --- .../docker/docker-arm64-export/build.gradle | 13 +++++++++++++ .../docker/docker-s390x-export/build.gradle | 13 +++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 distribution/docker/docker-arm64-export/build.gradle create mode 100644 distribution/docker/docker-s390x-export/build.gradle diff --git a/distribution/docker/docker-arm64-export/build.gradle b/distribution/docker/docker-arm64-export/build.gradle new file mode 100644 index 0000000000000..3506c4e39c234 --- /dev/null +++ b/distribution/docker/docker-arm64-export/build.gradle @@ -0,0 +1,13 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +// This file is intentionally blank. All configuration of the +// export is done in the parent project. diff --git a/distribution/docker/docker-s390x-export/build.gradle b/distribution/docker/docker-s390x-export/build.gradle new file mode 100644 index 0000000000000..3506c4e39c234 --- /dev/null +++ b/distribution/docker/docker-s390x-export/build.gradle @@ -0,0 +1,13 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +// This file is intentionally blank. All configuration of the +// export is done in the parent project. From b4b25dc0ef845d0f7a45b7db0ab931dcf783c560 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Fri, 28 Jul 2023 12:32:16 -0400 Subject: [PATCH 19/75] Revert "amending tests to include all codecs (#8907)" (#8968) This reverts commit c43743dd8ac132314dfe375f712c37a7a0f7d8df. Signed-off-by: Andriy Redko --- .../test/java/org/opensearch/upgrades/IndexingIT.java | 4 ++-- .../indices/replication/SegmentReplicationIT.java | 6 +++++- .../org/opensearch/test/OpenSearchIntegTestCase.java | 10 +--------- 3 files changed, 8 insertions(+), 12 deletions(-) diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index d196ec3dcd0a8..b60ee09d39048 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -40,6 +40,7 @@ import org.opensearch.common.Booleans; import org.opensearch.common.io.Streams; import org.opensearch.common.settings.Settings; +import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.EngineConfig; import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.rest.yaml.ObjectPath; @@ -53,7 +54,6 @@ import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; import static org.opensearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; -import static org.opensearch.test.OpenSearchIntegTestCase.CODECS; /** * Basic test that indexed documents survive the rolling restart. 
See @@ -267,7 +267,7 @@ public void testIndexingWithSegRep() throws Exception { .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put( EngineConfig.INDEX_CODEC_SETTING.getKey(), - randomFrom(CODECS) + randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC) ) .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"); createIndex(indexName, settings.build()); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java index 405d0b595e02b..3ab1a2a8564c5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java @@ -53,6 +53,7 @@ import org.opensearch.index.SegmentReplicationPerGroupStats; import org.opensearch.index.SegmentReplicationPressureService; import org.opensearch.index.SegmentReplicationShardStats; +import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.engine.NRTReplicationReaderManager; @@ -200,7 +201,10 @@ public void testReplicationAfterPrimaryRefreshAndFlush() throws Exception { final String nodeB = internalCluster().startDataOnlyNode(); final Settings settings = Settings.builder() .put(indexSettings()) - .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), randomFrom(CODECS)) + .put( + EngineConfig.INDEX_CODEC_SETTING.getKey(), + randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC) + ) .build(); createIndex(INDEX_NAME, settings); ensureGreen(INDEX_NAME); diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 6e3364b208180..3564bd667ee2b 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -364,14 +364,6 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase { private static OpenSearchIntegTestCase INSTANCE = null; // see @SuiteScope private static Long SUITE_SEED = null; - public static List CODECS = List.of( - CodecService.DEFAULT_CODEC, - CodecService.BEST_COMPRESSION_CODEC, - CodecService.LUCENE_DEFAULT_CODEC, - CodecService.ZSTD_CODEC, - CodecService.ZSTD_NO_DICT_CODEC - ); - @BeforeClass public static void beforeClass() throws Exception { SUITE_SEED = randomLong(); @@ -435,7 +427,7 @@ protected void randomIndexTemplate() { // otherwise, use it, it has assertions and so on that can find bugs. 
SuppressCodecs annotation = getClass().getAnnotation(SuppressCodecs.class); if (annotation != null && annotation.value().length == 1 && "*".equals(annotation.value()[0])) { - randomSettingsBuilder.put("index.codec", randomFrom(CODECS)); + randomSettingsBuilder.put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)); } else { randomSettingsBuilder.put("index.codec", CodecService.LUCENE_DEFAULT_CODEC); } From 05b6647bf3af9556b68857ce14c131256bcd22f9 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Fri, 28 Jul 2023 14:12:22 -0400 Subject: [PATCH 20/75] [distribution/archives] [Linux] [x64] Provide the variant of the distributions bundled with JRE (#8195) Signed-off-by: Andriy Redko --- CHANGELOG.md | 1 + DEVELOPER_GUIDE.md | 5 +- .../opensearch/gradle/JavaPackageType.java | 15 ++ .../org/opensearch/gradle/JavaVariant.java | 197 ++++++++++++++++++ .../main/java/org/opensearch/gradle/Jdk.java | 183 +--------------- .../main/java/org/opensearch/gradle/Jre.java | 18 ++ .../opensearch/gradle/JreDownloadPlugin.java | 147 +++++++++++++ .../gradle/OpenSearchDistribution.java | 12 +- .../opensearch/gradle/VersionProperties.java | 4 + .../InternalDistributionDownloadPlugin.java | 7 +- .../gradle/test/DistroTestPlugin.java | 21 +- .../opensearch.jre-download.properties | 12 ++ .../DistributionDownloadPluginTests.java | 63 ++++-- distribution/archives/build.gradle | 55 +++-- distribution/build.gradle | 61 +++++- distribution/src/bin/opensearch-env | 9 +- .../org/opensearch/monitor/jvm/JvmInfo.java | 9 +- .../main/java/org/opensearch/node/Node.java | 2 +- settings.gradle | 1 + 19 files changed, 568 insertions(+), 254 deletions(-) create mode 100644 buildSrc/src/main/java/org/opensearch/gradle/JavaPackageType.java create mode 100644 buildSrc/src/main/java/org/opensearch/gradle/JavaVariant.java create mode 100644 buildSrc/src/main/java/org/opensearch/gradle/Jre.java create mode 100644 buildSrc/src/main/java/org/opensearch/gradle/JreDownloadPlugin.java create mode 100644 buildSrc/src/main/resources/META-INF/gradle-plugins/opensearch.jre-download.properties diff --git a/CHANGELOG.md b/CHANGELOG.md index 12dae4fca545e..2f20b3b8d7b51 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -78,6 +78,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Added - Add server version as REST response header [#6583](https://github.com/opensearch-project/OpenSearch/issues/6583) - Start replication checkpointTimers on primary before segments upload to remote store. ([#8221]()https://github.com/opensearch-project/OpenSearch/pull/8221) +- [distribution/archives] [Linux] [x64] Provide the variant of the distributions bundled with JRE ([#8195]()https://github.com/opensearch-project/OpenSearch/pull/8195) ### Dependencies - Bump `org.apache.logging.log4j:log4j-core` from 2.17.1 to 2.20.0 ([#8307](https://github.com/opensearch-project/OpenSearch/pull/8307)) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 0baf626142238..1dce1f8a75035 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -264,7 +264,10 @@ This repository is split into many top level directories. The most important one ### `distribution` -Builds our tar and zip archives and our rpm and deb packages. +Builds our tar and zip archives and our rpm and deb packages. 
There are several flavors of the distributions, with the classifier included in the name of the final deliverable (archive or package): + - default (no classifier), the distribution with bundled JDK + - `-no-jdk-` - the distribution without bundled JDK/JRE, assumes the JDK/JRE is going to be pre-installed on the target systems + - `-jre-` - the distribution bundled with JRE (smaller footprint), supported as experimental feature for some platforms ### `libs` diff --git a/buildSrc/src/main/java/org/opensearch/gradle/JavaPackageType.java b/buildSrc/src/main/java/org/opensearch/gradle/JavaPackageType.java new file mode 100644 index 0000000000000..2acc335d80df0 --- /dev/null +++ b/buildSrc/src/main/java/org/opensearch/gradle/JavaPackageType.java @@ -0,0 +1,15 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.gradle; + +public enum JavaPackageType { + NONE, + JRE, + JDK +} diff --git a/buildSrc/src/main/java/org/opensearch/gradle/JavaVariant.java b/buildSrc/src/main/java/org/opensearch/gradle/JavaVariant.java new file mode 100644 index 0000000000000..5f576984627a8 --- /dev/null +++ b/buildSrc/src/main/java/org/opensearch/gradle/JavaVariant.java @@ -0,0 +1,197 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.gradle; + +import org.gradle.api.Buildable; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.provider.Property; +import org.gradle.api.tasks.TaskDependency; + +import java.io.File; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +abstract class JavaVariant implements Buildable, Iterable { + + private static final List ALLOWED_ARCHITECTURES = Collections.unmodifiableList( + Arrays.asList("aarch64", "x64", "s390x", "ppc64le") + ); + private static final List ALLOWED_VENDORS = Collections.unmodifiableList(Arrays.asList("adoptium", "adoptopenjdk", "openjdk")); + private static final List ALLOWED_PLATFORMS = Collections.unmodifiableList( + Arrays.asList("darwin", "freebsd", "linux", "mac", "windows") + ); + private static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?"); + private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)(?:\\+|\\-)(b\\d+?)(@([a-f0-9]{32}))?"); + + private final String name; + private final Configuration configuration; + + private final Property vendor; + private final Property version; + private final Property platform; + private final Property architecture; + private String baseVersion; + private String major; + private String build; + private String hash; + + JavaVariant(String name, Configuration configuration, ObjectFactory objectFactory) { + this.name = name; + this.configuration = configuration; + this.vendor = objectFactory.property(String.class); + this.version = objectFactory.property(String.class); + this.platform = objectFactory.property(String.class); + this.architecture = objectFactory.property(String.class); + } + + public String getName() { + return name; + } + + public String getVendor() { + return 
vendor.get(); + } + + public void setVendor(final String vendor) { + if (ALLOWED_VENDORS.contains(vendor) == false) { + throw new IllegalArgumentException("unknown vendor [" + vendor + "] for jdk [" + name + "], must be one of " + ALLOWED_VENDORS); + } + this.vendor.set(vendor); + } + + public String getVersion() { + return version.get(); + } + + public void setVersion(String version) { + if (VERSION_PATTERN.matcher(version).matches() == false && LEGACY_VERSION_PATTERN.matcher(version).matches() == false) { + throw new IllegalArgumentException("malformed version [" + version + "] for jdk [" + name + "]"); + } + parseVersion(version); + this.version.set(version); + } + + public String getPlatform() { + return platform.get(); + } + + public void setPlatform(String platform) { + if (ALLOWED_PLATFORMS.contains(platform) == false) { + throw new IllegalArgumentException( + "unknown platform [" + platform + "] for jdk [" + name + "], must be one of " + ALLOWED_PLATFORMS + ); + } + this.platform.set(platform); + } + + public String getArchitecture() { + return architecture.get(); + } + + public void setArchitecture(final String architecture) { + String jdkArchitecture = translateJdkArchitecture(architecture); + if (ALLOWED_ARCHITECTURES.contains(jdkArchitecture) == false) { + throw new IllegalArgumentException( + "unknown architecture [" + jdkArchitecture + "] for jdk [" + name + "], must be one of " + ALLOWED_ARCHITECTURES + ); + } + this.architecture.set(jdkArchitecture); + } + + public String getBaseVersion() { + return baseVersion; + } + + public String getMajor() { + return major; + } + + public String getBuild() { + return build; + } + + public String getHash() { + return hash; + } + + public String getPath() { + return configuration.getSingleFile().toString(); + } + + public String getConfigurationName() { + return configuration.getName(); + } + + @Override + public String toString() { + return getPath(); + } + + @Override + public TaskDependency getBuildDependencies() { + return configuration.getBuildDependencies(); + } + + // internal, make this jdks configuration unmodifiable + void finalizeValues() { + if (version.isPresent() == false) { + throw new IllegalArgumentException("version not specified for jdk [" + name + "]"); + } + if (platform.isPresent() == false) { + throw new IllegalArgumentException("platform not specified for jdk [" + name + "]"); + } + if (vendor.isPresent() == false) { + throw new IllegalArgumentException("vendor not specified for jdk [" + name + "]"); + } + if (architecture.isPresent() == false) { + throw new IllegalArgumentException("architecture not specified for jdk [" + name + "]"); + } + version.finalizeValue(); + platform.finalizeValue(); + vendor.finalizeValue(); + architecture.finalizeValue(); + } + + @Override + public Iterator iterator() { + return configuration.iterator(); + } + + private void parseVersion(String version) { + // decompose the bundled jdk version, broken into elements as: [feature, interim, update, build] + // Note the "patch" version is not yet handled here, as it has not yet been used by java. + Matcher jdkVersionMatcher = VERSION_PATTERN.matcher(version); + if (jdkVersionMatcher.matches() == false) { + // Try again with the pre-Java9 version format + jdkVersionMatcher = LEGACY_VERSION_PATTERN.matcher(version); + + if (jdkVersionMatcher.matches() == false) { + throw new IllegalArgumentException("Malformed jdk version [" + version + "]"); + } + } + + baseVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? 
(jdkVersionMatcher.group(2)) : ""); + major = jdkVersionMatcher.group(1); + build = jdkVersionMatcher.group(3); + hash = jdkVersionMatcher.group(5); + } + + private String translateJdkArchitecture(String architecture) { + /* + * Jdk uses aarch64 from ARM. Translating from arm64 to aarch64 which Jdk understands. + */ + return "arm64".equals(architecture) ? "aarch64" : architecture; + } + +} diff --git a/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java b/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java index 06e857744be2d..3218abe726639 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java @@ -32,140 +32,13 @@ package org.opensearch.gradle; -import org.gradle.api.Buildable; import org.gradle.api.artifacts.Configuration; import org.gradle.api.model.ObjectFactory; -import org.gradle.api.provider.Property; -import org.gradle.api.tasks.TaskDependency; import org.gradle.internal.os.OperatingSystem; -import java.io.File; -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class Jdk implements Buildable, Iterable { - - private static final List ALLOWED_ARCHITECTURES = Collections.unmodifiableList( - Arrays.asList("aarch64", "x64", "s390x", "ppc64le") - ); - private static final List ALLOWED_VENDORS = Collections.unmodifiableList(Arrays.asList("adoptium", "adoptopenjdk", "openjdk")); - private static final List ALLOWED_PLATFORMS = Collections.unmodifiableList( - Arrays.asList("darwin", "freebsd", "linux", "mac", "windows") - ); - private static final Pattern VERSION_PATTERN = Pattern.compile("(\\d+)(\\.\\d+\\.\\d+)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?"); - private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)(?:\\+|\\-)(b\\d+?)(@([a-f0-9]{32}))?"); - - private final String name; - private final Configuration configuration; - - private final Property vendor; - private final Property version; - private final Property platform; - private final Property architecture; - private String baseVersion; - private String major; - private String build; - private String hash; - +public class Jdk extends JavaVariant { Jdk(String name, Configuration configuration, ObjectFactory objectFactory) { - this.name = name; - this.configuration = configuration; - this.vendor = objectFactory.property(String.class); - this.version = objectFactory.property(String.class); - this.platform = objectFactory.property(String.class); - this.architecture = objectFactory.property(String.class); - } - - public String getName() { - return name; - } - - public String getVendor() { - return vendor.get(); - } - - public void setVendor(final String vendor) { - if (ALLOWED_VENDORS.contains(vendor) == false) { - throw new IllegalArgumentException("unknown vendor [" + vendor + "] for jdk [" + name + "], must be one of " + ALLOWED_VENDORS); - } - this.vendor.set(vendor); - } - - public String getVersion() { - return version.get(); - } - - public void setVersion(String version) { - if (VERSION_PATTERN.matcher(version).matches() == false && LEGACY_VERSION_PATTERN.matcher(version).matches() == false) { - throw new IllegalArgumentException("malformed version [" + version + "] for jdk [" + name + "]"); - } - parseVersion(version); - this.version.set(version); - } - - public String getPlatform() { - return platform.get(); - } - - public void setPlatform(String platform) { - if 
(ALLOWED_PLATFORMS.contains(platform) == false) { - throw new IllegalArgumentException( - "unknown platform [" + platform + "] for jdk [" + name + "], must be one of " + ALLOWED_PLATFORMS - ); - } - this.platform.set(platform); - } - - public String getArchitecture() { - return architecture.get(); - } - - public void setArchitecture(final String architecture) { - String jdkArchitecture = translateJdkArchitecture(architecture); - if (ALLOWED_ARCHITECTURES.contains(jdkArchitecture) == false) { - throw new IllegalArgumentException( - "unknown architecture [" + jdkArchitecture + "] for jdk [" + name + "], must be one of " + ALLOWED_ARCHITECTURES - ); - } - this.architecture.set(jdkArchitecture); - } - - public String getBaseVersion() { - return baseVersion; - } - - public String getMajor() { - return major; - } - - public String getBuild() { - return build; - } - - public String getHash() { - return hash; - } - - public String getPath() { - return configuration.getSingleFile().toString(); - } - - public String getConfigurationName() { - return configuration.getName(); - } - - @Override - public String toString() { - return getPath(); - } - - @Override - public TaskDependency getBuildDependencies() { - return configuration.getBuildDependencies(); + super(name, configuration, objectFactory); } public Object getBinJavaPath() { @@ -190,56 +63,4 @@ private String getHomeRoot() { boolean isOSX = "mac".equals(getPlatform()) || "darwin".equals(getPlatform()); return getPath() + (isOSX ? "/Contents/Home" : ""); } - - // internal, make this jdks configuration unmodifiable - void finalizeValues() { - if (version.isPresent() == false) { - throw new IllegalArgumentException("version not specified for jdk [" + name + "]"); - } - if (platform.isPresent() == false) { - throw new IllegalArgumentException("platform not specified for jdk [" + name + "]"); - } - if (vendor.isPresent() == false) { - throw new IllegalArgumentException("vendor not specified for jdk [" + name + "]"); - } - if (architecture.isPresent() == false) { - throw new IllegalArgumentException("architecture not specified for jdk [" + name + "]"); - } - version.finalizeValue(); - platform.finalizeValue(); - vendor.finalizeValue(); - architecture.finalizeValue(); - } - - @Override - public Iterator iterator() { - return configuration.iterator(); - } - - private void parseVersion(String version) { - // decompose the bundled jdk version, broken into elements as: [feature, interim, update, build] - // Note the "patch" version is not yet handled here, as it has not yet been used by java. - Matcher jdkVersionMatcher = VERSION_PATTERN.matcher(version); - if (jdkVersionMatcher.matches() == false) { - // Try again with the pre-Java9 version format - jdkVersionMatcher = LEGACY_VERSION_PATTERN.matcher(version); - - if (jdkVersionMatcher.matches() == false) { - throw new IllegalArgumentException("Malformed jdk version [" + version + "]"); - } - } - - baseVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : ""); - major = jdkVersionMatcher.group(1); - build = jdkVersionMatcher.group(3); - hash = jdkVersionMatcher.group(5); - } - - private String translateJdkArchitecture(String architecture) { - /* - * Jdk uses aarch64 from ARM. Translating from arm64 to aarch64 which Jdk understands. - */ - return "arm64".equals(architecture) ? 
"aarch64" : architecture; - } - } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/Jre.java b/buildSrc/src/main/java/org/opensearch/gradle/Jre.java new file mode 100644 index 0000000000000..473bfc4860b80 --- /dev/null +++ b/buildSrc/src/main/java/org/opensearch/gradle/Jre.java @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.gradle; + +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.model.ObjectFactory; + +public class Jre extends JavaVariant { + Jre(String name, Configuration configuration, ObjectFactory objectFactory) { + super(name, configuration, objectFactory); + } +} diff --git a/buildSrc/src/main/java/org/opensearch/gradle/JreDownloadPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/JreDownloadPlugin.java new file mode 100644 index 0000000000000..5a00f41f07a60 --- /dev/null +++ b/buildSrc/src/main/java/org/opensearch/gradle/JreDownloadPlugin.java @@ -0,0 +1,147 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.gradle; + +import org.opensearch.gradle.transform.SymbolicLinkPreservingUntarTransform; +import org.opensearch.gradle.transform.UnzipTransform; +import org.gradle.api.GradleException; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.dsl.RepositoryHandler; +import org.gradle.api.artifacts.repositories.IvyArtifactRepository; +import org.gradle.api.artifacts.type.ArtifactTypeDefinition; +import org.gradle.api.attributes.Attribute; +import org.gradle.api.internal.artifacts.ArtifactAttributes; + +public class JreDownloadPlugin implements Plugin { + public static final String VENDOR_ADOPTIUM = "adoptium"; + + private static final String REPO_NAME_PREFIX = "jre_repo_"; + private static final String EXTENSION_NAME = "jres"; + public static final String JRE_TRIMMED_PREFIX = "jdk-?\\d.*-jre"; + + @Override + public void apply(Project project) { + Attribute jreAttribute = Attribute.of("jre", Boolean.class); + project.getDependencies().getAttributesSchema().attribute(jreAttribute); + project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); + project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { + transformSpec.getFrom() + .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE) + .attribute(jreAttribute, true); + transformSpec.getTo() + .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(jreAttribute, true); + transformSpec.parameters(parameters -> parameters.setTrimmedPrefixPattern(JRE_TRIMMED_PREFIX)); + }); + + ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); + project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { + transformSpec.getFrom() + .attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()) + .attribute(jreAttribute, true); + transformSpec.getTo() + .attribute(ArtifactAttributes.ARTIFACT_FORMAT, 
ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(jreAttribute, true); + transformSpec.parameters(parameters -> parameters.setTrimmedPrefixPattern(JRE_TRIMMED_PREFIX)); + }); + + NamedDomainObjectContainer jresContainer = project.container(Jre.class, name -> { + Configuration configuration = project.getConfigurations().create("jre_" + name); + configuration.setCanBeConsumed(false); + configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + configuration.getAttributes().attribute(jreAttribute, true); + Jre jre = new Jre(name, configuration, project.getObjects()); + configuration.defaultDependencies(dependencies -> { + jre.finalizeValues(); + setupRepository(project, jre); + dependencies.add(project.getDependencies().create(dependencyNotation(jre))); + }); + return jre; + }); + project.getExtensions().add(EXTENSION_NAME, jresContainer); + } + + private void setupRepository(Project project, Jre jre) { + RepositoryHandler repositories = project.getRepositories(); + + /* + * Define the appropriate repository for the given JRE vendor and version + * + * For Oracle/OpenJDK/AdoptOpenJDK we define a repository per-version. + */ + String repoName = REPO_NAME_PREFIX + jre.getVendor() + "_" + jre.getVersion(); + String repoUrl; + String artifactPattern; + + if (jre.getVendor().equals(VENDOR_ADOPTIUM)) { + repoUrl = "https://github.com/adoptium/temurin" + jre.getMajor() + "-binaries/releases/download/"; + + if (jre.getMajor().equals("8")) { + // JDK-8 updates are always suffixed with 'U' (fe OpenJDK8U). + artifactPattern = "jdk" + + jre.getBaseVersion() + + "-" + + jre.getBuild() + + "/OpenJDK" + + jre.getMajor() + + "U" + + "-jre_[classifier]_[module]_hotspot_" + + jre.getBaseVersion() + + jre.getBuild() + + ".[ext]"; + } else { + // JDK updates are suffixed with 'U' (fe OpenJDK17U), whereas GA releases are not (fe OpenJDK17). + // To distinguish between those, the GA releases have only major version component (fe 17+32), + // the updates always have minor/patch components (fe 17.0.1+12), checking for the presence of + // version separator '.' should be enough. + artifactPattern = "jdk-" + jre.getBaseVersion() + "+" + jre.getBuild() + "/OpenJDK" + jre.getMajor() + // JDK-20 does use 'U' suffix all the time, no matter it is update or GA release + + (jre.getBaseVersion().contains(".") || jre.getBaseVersion().matches("^2\\d+$") ? "U" : "") + + "-jre_[classifier]_[module]_hotspot_" + + jre.getBaseVersion() + + "_" + + jre.getBuild() + + ".[ext]"; + } + } else { + throw new GradleException("Unknown JDK vendor [" + jre.getVendor() + "]"); + } + + // Define the repository if we haven't already + if (repositories.findByName(repoName) == null) { + repositories.ivy(repo -> { + repo.setName(repoName); + repo.setUrl(repoUrl); + repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); + repo.patternLayout(layout -> layout.artifact(artifactPattern)); + repo.content(repositoryContentDescriptor -> repositoryContentDescriptor.includeGroup(groupName(jre))); + }); + } + } + + @SuppressWarnings("unchecked") + public static NamedDomainObjectContainer getContainer(Project project) { + return (NamedDomainObjectContainer) project.getExtensions().getByName(EXTENSION_NAME); + } + + private static String dependencyNotation(Jre jre) { + String platformDep = jre.getPlatform().equals("darwin") || jre.getPlatform().equals("mac") ? "mac" : jre.getPlatform(); + String extension = jre.getPlatform().equals("windows") ? 
"zip" : "tar.gz"; + + return groupName(jre) + ":" + platformDep + ":" + jre.getBaseVersion() + ":" + jre.getArchitecture() + "@" + extension; + } + + private static String groupName(Jre jre) { + return jre.getVendor() + "_" + jre.getMajor() + "_jre"; + } + +} diff --git a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchDistribution.java b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchDistribution.java index 968bd13bd4011..0575c23fee9f6 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchDistribution.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchDistribution.java @@ -101,7 +101,7 @@ public boolean shouldExtract() { private final Property version; private final Property type; private final Property platform; - private final Property bundledJdk; + private final Property bundledJdk; private final Property failIfUnavailable; private final Configuration extracted; @@ -120,7 +120,7 @@ public boolean shouldExtract() { this.type = objectFactory.property(Type.class); this.type.convention(Type.ARCHIVE); this.platform = objectFactory.property(Platform.class); - this.bundledJdk = objectFactory.property(Boolean.class); + this.bundledJdk = objectFactory.property(JavaPackageType.class); this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true); this.extracted = extractedConfiguration; } @@ -154,8 +154,8 @@ public void setType(Type type) { this.type.set(type); } - public boolean getBundledJdk() { - return bundledJdk.getOrElse(true); + public JavaPackageType getBundledJdk() { + return bundledJdk.getOrElse(JavaPackageType.JDK); } public boolean isDocker() { @@ -163,7 +163,7 @@ public boolean isDocker() { return type == Type.DOCKER; } - public void setBundledJdk(Boolean bundledJdk) { + public void setBundledJdk(JavaPackageType bundledJdk) { this.bundledJdk.set(bundledJdk); } @@ -266,7 +266,7 @@ void finalizeValues() { } if (bundledJdk.isPresent() == false) { - bundledJdk.set(true); + bundledJdk.set(JavaPackageType.JDK); } version.finalizeValue(); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/VersionProperties.java b/buildSrc/src/main/java/org/opensearch/gradle/VersionProperties.java index 7c942358e12c3..4d8b62d95dff1 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/VersionProperties.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/VersionProperties.java @@ -76,6 +76,10 @@ public static String getBundledJdk(final String platform) { return getBundledJdk(platform, null); } + public static String getBundledJre(final String platform, final String arch) { + return getBundledJdk(platform, arch); + } + public static String getBundledJdkVendor() { return bundledJdkVendor; } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionDownloadPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionDownloadPlugin.java index f4368b1cecc59..6a54612320c6c 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionDownloadPlugin.java @@ -37,6 +37,7 @@ import org.opensearch.gradle.DistributionDependency; import org.opensearch.gradle.DistributionDownloadPlugin; import org.opensearch.gradle.DistributionResolution; +import org.opensearch.gradle.JavaPackageType; import org.opensearch.gradle.OpenSearchDistribution; import org.opensearch.gradle.Version; import org.opensearch.gradle.VersionProperties; @@ -99,7 +100,7 @@ private void 
registerInternalDistributionResolutions(NamedDomainObjectContainer< resolutions.register("bwc", distributionResolution -> distributionResolution.setResolver((project, distribution) -> { BwcVersions.UnreleasedVersionInfo unreleasedInfo = bwcVersions.unreleasedInfo(Version.fromString(distribution.getVersion())); if (unreleasedInfo != null) { - if (!distribution.getBundledJdk()) { + if (distribution.getBundledJdk() == JavaPackageType.NONE) { throw new GradleException( "Configuring a snapshot bwc distribution ('" + distribution.getName() @@ -167,8 +168,10 @@ private static String distributionProjectName(OpenSearchDistribution distributio ? "" : "-" + architecture.toString().toLowerCase(); - if (distribution.getBundledJdk() == false) { + if (distribution.getBundledJdk() == JavaPackageType.NONE) { projectName += "no-jdk-"; + } else if (distribution.getBundledJdk() == JavaPackageType.JRE) { + projectName += "jre-"; } switch (distribution.getType()) { case ARCHIVE: diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java index d32172758cfce..a311d89583acb 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java @@ -34,6 +34,7 @@ import org.opensearch.gradle.Architecture; import org.opensearch.gradle.DistributionDownloadPlugin; +import org.opensearch.gradle.JavaPackageType; import org.opensearch.gradle.OpenSearchDistribution; import org.opensearch.gradle.Jdk; import org.opensearch.gradle.JdkDownloadPlugin; @@ -71,6 +72,7 @@ import java.util.Locale; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.function.Supplier; import java.util.stream.Stream; @@ -137,7 +139,7 @@ public void apply(Project project) { } if ((distribution.getType() == OpenSearchDistribution.Type.DEB || distribution.getType() == OpenSearchDistribution.Type.RPM) - && distribution.getBundledJdk()) { + && distribution.getBundledJdk() != JavaPackageType.NONE) { for (Version version : BuildParams.getBwcVersions().getIndexCompatible()) { if (version.before("6.3.0")) { continue; // before opening xpack @@ -379,8 +381,8 @@ private List configureDistributions(Project project) { OpenSearchDistribution.Type.RPM, OpenSearchDistribution.Type.DOCKER )) { - for (boolean bundledJdk : Arrays.asList(true, false)) { - if (bundledJdk == false) { + for (JavaPackageType bundledJdk : Set.of(JavaPackageType.NONE, JavaPackageType.JDK)) { + if (bundledJdk == JavaPackageType.NONE) { // We'll never publish an ARM (arm64) build without a bundled JDK. 
if (architecture == Architecture.ARM64) { continue; @@ -403,8 +405,8 @@ private List configureDistributions(Project project) { OpenSearchDistribution.Platform.LINUX, OpenSearchDistribution.Platform.WINDOWS )) { - for (boolean bundledJdk : Arrays.asList(true, false)) { - if (bundledJdk == false && architecture != Architecture.X64) { + for (JavaPackageType bundledJdk : Set.of(JavaPackageType.NONE, JavaPackageType.JDK)) { + if (bundledJdk == JavaPackageType.NONE && architecture != Architecture.X64) { // We will never publish distributions for non-x86 (amd64) platforms // without a bundled JDK continue; @@ -432,7 +434,7 @@ private static OpenSearchDistribution createDistro( Architecture architecture, OpenSearchDistribution.Type type, OpenSearchDistribution.Platform platform, - boolean bundledJdk, + JavaPackageType bundledJdk, String version ) { String name = distroId(type, platform, bundledJdk, architecture) + "-" + version; @@ -466,11 +468,12 @@ private static boolean isWindows(Project project) { private static String distroId( OpenSearchDistribution.Type type, OpenSearchDistribution.Platform platform, - boolean bundledJdk, + JavaPackageType bundledJdk, Architecture architecture ) { - return (type == OpenSearchDistribution.Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk ? "" : "-no-jdk") - + (architecture == Architecture.X64 ? "" : "-" + architecture.toString().toLowerCase()); + return (type == OpenSearchDistribution.Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk != JavaPackageType.NONE + ? (bundledJdk == JavaPackageType.JDK ? "" : "-jre") + : "-no-jdk") + (architecture == Architecture.X64 ? "" : "-" + architecture.toString().toLowerCase()); } private static String destructiveDistroTestTaskName(OpenSearchDistribution distro) { diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/opensearch.jre-download.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/opensearch.jre-download.properties new file mode 100644 index 0000000000000..e9253488ffbeb --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/opensearch.jre-download.properties @@ -0,0 +1,12 @@ +# +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+# + +implementation-class=org.opensearch.gradle.JreDownloadPlugin diff --git a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java index 1a9647573f948..e439e8a047f34 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java @@ -45,6 +45,7 @@ import java.util.Arrays; import java.util.TreeSet; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.core.StringContains.containsString; public class DistributionDownloadPluginTests extends GradleUnitTestCase { @@ -76,7 +77,14 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase { ); public void testVersionDefault() { - OpenSearchDistribution distro = checkDistro(createProject(null, false), "testdistro", null, Type.ARCHIVE, Platform.LINUX, true); + OpenSearchDistribution distro = checkDistro( + createProject(null, false), + "testdistro", + null, + Type.ARCHIVE, + Platform.LINUX, + JavaPackageType.JDK + ); assertEquals(distro.getVersion(), VersionProperties.getOpenSearch()); } @@ -123,18 +131,32 @@ public void testBadVersionFormat() { "badversion", Type.ARCHIVE, Platform.LINUX, - true, + JavaPackageType.JDK, "Invalid version format: 'badversion'" ); } public void testTypeDefault() { - OpenSearchDistribution distro = checkDistro(createProject(null, false), "testdistro", "5.0.0", null, Platform.LINUX, true); + OpenSearchDistribution distro = checkDistro( + createProject(null, false), + "testdistro", + "5.0.0", + null, + Platform.LINUX, + JavaPackageType.JDK + ); assertEquals(distro.getType(), Type.ARCHIVE); } public void testPlatformDefault() { - OpenSearchDistribution distro = checkDistro(createProject(null, false), "testdistro", "5.0.0", Type.ARCHIVE, null, true); + OpenSearchDistribution distro = checkDistro( + createProject(null, false), + "testdistro", + "5.0.0", + Type.ARCHIVE, + null, + JavaPackageType.JDK + ); assertEquals(distro.getPlatform(), OpenSearchDistribution.CURRENT_PLATFORM); } @@ -151,8 +173,15 @@ public void testPlatformForIntegTest() { } public void testBundledJdkDefault() { - OpenSearchDistribution distro = checkDistro(createProject(null, false), "testdistro", "5.0.0", Type.ARCHIVE, Platform.LINUX, true); - assertTrue(distro.getBundledJdk()); + OpenSearchDistribution distro = checkDistro( + createProject(null, false), + "testdistro", + "5.0.0", + Type.ARCHIVE, + Platform.LINUX, + JavaPackageType.JDK + ); + assertThat(distro.getBundledJdk(), equalTo(JavaPackageType.JDK)); } public void testBundledJdkForIntegTest() { @@ -162,7 +191,7 @@ public void testBundledJdkForIntegTest() { "5.0.0", Type.INTEG_TEST_ZIP, null, - true, + JavaPackageType.JDK, "bundledJdk cannot be set on opensearch distribution [testdistro]" ); } @@ -178,7 +207,7 @@ public void testLocalCurrentVersionIntegTestZip() { public void testLocalCurrentVersionArchives() { for (Platform platform : Platform.values()) { - for (boolean bundledJdk : new boolean[] { true, false }) { + for (JavaPackageType bundledJdk : JavaPackageType.values()) { for (Architecture architecture : Architecture.values()) { // create a new project in each iteration, so that we know we are resolving the only additional project being created Project project = createProject(BWC_MINOR, true); @@ -204,7 +233,7 @@ public void testLocalCurrentVersionArchives() { public void testLocalCurrentVersionPackages() { for (Type 
packageType : new Type[] { Type.RPM, Type.DEB }) { - for (boolean bundledJdk : new boolean[] { true, false }) { + for (JavaPackageType bundledJdk : JavaPackageType.values()) { Project project = createProject(BWC_MINOR, true); String projectName = projectName(packageType.toString(), bundledJdk); Project packageProject = ProjectBuilder.builder().withParent(packagesProject).withName(projectName).build(); @@ -219,7 +248,7 @@ public void testLocalCurrentVersionPackages() { public void testLocalBwcArchives() { for (Platform platform : Platform.values()) { // note: no non bundled jdk for bwc - String configName = projectName(platform.toString(), true); + String configName = projectName(platform.toString(), JavaPackageType.JDK); configName += (platform == Platform.WINDOWS ? "-zip" : "-tar"); checkBwc("minor", configName, BWC_MINOR_VERSION, Type.ARCHIVE, platform, BWC_MINOR, true); @@ -232,7 +261,7 @@ public void testLocalBwcArchives() { public void testLocalBwcPackages() { for (Type packageType : new Type[] { Type.RPM, Type.DEB }) { // note: no non bundled jdk for bwc - String configName = projectName(packageType.toString(), true); + String configName = projectName(packageType.toString(), JavaPackageType.JDK); checkBwc("minor", configName, BWC_MINOR_VERSION, packageType, null, BWC_MINOR, true); checkBwc("staged", configName, BWC_STAGED_VERSION, packageType, null, BWC_STAGED, true); @@ -247,7 +276,7 @@ private void assertDistroError( String version, Type type, Platform platform, - Boolean bundledJdk, + JavaPackageType bundledJdk, String message ) { IllegalArgumentException e = expectThrows( @@ -263,7 +292,7 @@ private OpenSearchDistribution createDistro( String version, Type type, Platform platform, - Boolean bundledJdk + JavaPackageType bundledJdk ) { NamedDomainObjectContainer distros = DistributionDownloadPlugin.getContainer(project); return distros.create(name, distro -> { @@ -289,7 +318,7 @@ private OpenSearchDistribution checkDistro( String version, Type type, Platform platform, - Boolean bundledJdk + JavaPackageType bundledJdk ) { OpenSearchDistribution distribution = createDistro(project, name, version, type, platform, bundledJdk); distribution.finalizeValues(); @@ -315,7 +344,7 @@ private void checkBwc( Project archiveProject = ProjectBuilder.builder().withParent(bwcProject).withName(projectName).build(); archiveProject.getConfigurations().create(config); archiveProject.getArtifacts().add(config, new File("doesnotmatter")); - final OpenSearchDistribution distro = createDistro(project, "distro", version.toString(), type, platform, true); + final OpenSearchDistribution distro = createDistro(project, "distro", version.toString(), type, platform, JavaPackageType.JDK); distro.setArchitecture(Architecture.current()); checkPlugin(project); } @@ -335,7 +364,7 @@ private Project createProject(BwcVersions bwcVersions, boolean isInternal) { return project; } - private static String projectName(String base, boolean bundledJdk) { - return bundledJdk ? base : ("no-jdk-" + base); + private static String projectName(String base, JavaPackageType bundledJdk) { + return (bundledJdk == JavaPackageType.JDK) ? base : ((bundledJdk == JavaPackageType.NONE) ? ("no-jdk-" + base) : "jre-" + base); } } diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index e7137127053e2..161b8008525b4 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -28,9 +28,11 @@ * under the License. 
*/ +import org.opensearch.gradle.JavaPackageType + apply plugin: 'opensearch.internal-distribution-archive-setup' -CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, boolean jdk) { +CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, String architecture, JavaPackageType java) { return copySpec { into("opensearch-${version}") { into('lib') { @@ -39,19 +41,23 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla into('config') { dirMode 0750 fileMode 0660 - with configFiles(distributionType, jdk) + with configFiles(distributionType, java) from { dirMode 0750 jvmOptionsDir.getParent() } } into('bin') { - with binFiles(distributionType, jdk) + with binFiles(distributionType, java) } - if (jdk) { + if (java == JavaPackageType.JDK) { into("darwin".equals(platform) ? 'jdk.app' : 'jdk') { with jdkFiles(project, platform, architecture) } + } else if (java == JavaPackageType.JRE) { + into("darwin".equals(platform) ? 'jre.app' : 'jre') { + with jreFiles(project, platform, architecture) + } } into('') { from { @@ -73,7 +79,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla rename { 'LICENSE.txt' } } - with noticeFile(jdk) + with noticeFile(java) into('modules') { with modulesFiles } @@ -84,77 +90,84 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla distribution_archives { integTestZip { content { - archiveFiles(transportModulesFiles, 'zip', null, 'x64', false) + archiveFiles(transportModulesFiles, 'zip', null, 'x64', JavaPackageType.NONE) } } darwinTar { archiveClassifier = 'darwin-x64' content { - archiveFiles(modulesFiles('darwin-x64'), 'tar', 'darwin', 'x64', true) + archiveFiles(modulesFiles('darwin-x64'), 'tar', 'darwin', 'x64', JavaPackageType.JDK) } } darwinArm64Tar { archiveClassifier = 'darwin-arm64' content { - archiveFiles(modulesFiles('darwin-arm64'), 'tar', 'darwin', 'arm64', true) + archiveFiles(modulesFiles('darwin-arm64'), 'tar', 'darwin', 'arm64', JavaPackageType.JDK) } } noJdkDarwinTar { archiveClassifier = 'no-jdk-darwin-x64' content { - archiveFiles(modulesFiles('darwin-x64'), 'tar', 'darwin', 'x64', false) + archiveFiles(modulesFiles('darwin-x64'), 'tar', 'darwin', 'x64', JavaPackageType.NONE) } } noJdkDarwinArm64Tar { archiveClassifier = 'no-jdk-darwin-arm64' content { - archiveFiles(modulesFiles('darwin-arm64'), 'tar', 'darwin', 'arm64', false) + archiveFiles(modulesFiles('darwin-arm64'), 'tar', 'darwin', 'arm64', JavaPackageType.NONE) } } freebsdTar { archiveClassifier = 'freebsd-x64' content { - archiveFiles(modulesFiles('freebsd-x64'), 'tar', 'freebsd', 'x64', false) + archiveFiles(modulesFiles('freebsd-x64'), 'tar', 'freebsd', 'x64', JavaPackageType.NONE) } } noJdkFreebsdTar { archiveClassifier = 'no-jdk-freebsd-x64' content { - archiveFiles(modulesFiles('freebsd-x64'), 'tar', 'freebsd', 'x64', false) + archiveFiles(modulesFiles('freebsd-x64'), 'tar', 'freebsd', 'x64', JavaPackageType.NONE) } } linuxArm64Tar { archiveClassifier = 'linux-arm64' content { - archiveFiles(modulesFiles('linux-arm64'), 'tar', 'linux', 'arm64', true) + archiveFiles(modulesFiles('linux-arm64'), 'tar', 'linux', 'arm64', JavaPackageType.JDK) } } noJdkLinuxArm64Tar { archiveClassifier = 'no-jdk-linux-arm64' content { - archiveFiles(modulesFiles('linux-arm64'), 'tar', 'linux', 'arm64', false) + archiveFiles(modulesFiles('linux-arm64'), 'tar', 'linux', 'arm64', JavaPackageType.NONE) } } linuxTar { archiveClassifier = 
'linux-x64' content { - archiveFiles(modulesFiles('linux-x64'), 'tar', 'linux', 'x64', true) + archiveFiles(modulesFiles('linux-x64'), 'tar', 'linux', 'x64', JavaPackageType.JDK) } } noJdkLinuxTar { archiveClassifier = 'no-jdk-linux-x64' content { - archiveFiles(modulesFiles('linux-x64'), 'tar', 'linux', 'x64', false) + archiveFiles(modulesFiles('linux-x64'), 'tar', 'linux', 'x64', JavaPackageType.NONE) + } + } + + jreLinuxTar { + archiveClassifier = 'jre-linux-x64' + content { + archiveFiles(modulesFiles('linux-x64'), 'tar', 'linux', 'x64', JavaPackageType.JRE) } } @@ -163,7 +176,7 @@ distribution_archives { linuxS390xTar { archiveClassifier = 'linux-s390x' content { - archiveFiles(modulesFiles('linux-s390x'), 'tar', 'linux', 's390x', false) + archiveFiles(modulesFiles('linux-s390x'), 'tar', 'linux', 's390x', JavaPackageType.NONE) } } @@ -171,28 +184,28 @@ distribution_archives { linuxPpc64leTar { archiveClassifier = 'linux-ppc64le' content { - archiveFiles(modulesFiles('linux-ppc64le'), 'tar', 'linux', 'ppc64le', true) + archiveFiles(modulesFiles('linux-ppc64le'), 'tar', 'linux', 'ppc64le', JavaPackageType.JDK) } } noJdkLinuxPpc64leTar { archiveClassifier = 'no-jdk-linux-ppc64le' content { - archiveFiles(modulesFiles('linux-ppc64le'), 'tar', 'linux', 'ppc64le', false) + archiveFiles(modulesFiles('linux-ppc64le'), 'tar', 'linux', 'ppc64le', JavaPackageType.NONE) } } windowsZip { archiveClassifier = 'windows-x64' content { - archiveFiles(modulesFiles('windows-x64'), 'zip', 'windows', 'x64', true) + archiveFiles(modulesFiles('windows-x64'), 'zip', 'windows', 'x64', JavaPackageType.JDK) } } noJdkWindowsZip { archiveClassifier = 'no-jdk-windows-x64' content { - archiveFiles(modulesFiles('windows-x64'), 'zip', 'windows', 'x64', false) + archiveFiles(modulesFiles('windows-x64'), 'zip', 'windows', 'x64', JavaPackageType.NONE) } } } diff --git a/distribution/build.gradle b/distribution/build.gradle index 2378ed833ead7..35ca84ca66dba 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -36,6 +36,7 @@ import org.opensearch.gradle.MavenFilteringHack import org.opensearch.gradle.NoticeTask import org.opensearch.gradle.VersionProperties import org.opensearch.gradle.info.BuildParams +import org.opensearch.gradle.JavaPackageType import java.nio.file.Files import java.nio.file.Path @@ -275,13 +276,14 @@ project(':test:external-modules').subprojects.each { Project testModule -> configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { apply plugin: 'opensearch.jdk-download' + apply plugin: 'opensearch.jre-download' apply plugin: 'opensearch.repositories' // Setup all required JDKs project.jdks { ['darwin', 'linux', 'windows'].each { platform -> (platform == 'linux' || platform == 'darwin' ? ['x64', 'aarch64', 's390x', 'ppc64le'] : ['x64']).each { architecture -> - "bundled_${platform}_${architecture}" { + "bundled_jdk_${platform}_${architecture}" { it.platform = platform it.version = VersionProperties.getBundledJdk(platform, architecture) it.vendor = VersionProperties.bundledJdkVendor @@ -291,6 +293,20 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } + // Setup all required JREs + project.jres { + ['darwin', 'linux', 'windows'].each { platform -> + (platform == 'linux' || platform == 'darwin' ? 
['x64', 'aarch64', 's390x', 'ppc64le'] : ['x64']).each { architecture -> + "bundled_jre_${platform}_${architecture}" { + it.platform = platform + it.version = VersionProperties.getBundledJre(platform, architecture) + it.vendor = VersionProperties.bundledJdkVendor + it.architecture = architecture + } + } + } + } + // TODO: the map needs to be an input of the tasks, so that when it changes, the task will re-run... /***************************************************************************** * Properties to expand when copying packaging files * @@ -377,20 +393,20 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from buildTransportModulesTaskProvider } - configFiles = { distributionType, jdk -> + configFiles = { distributionType, java -> copySpec { with copySpec { // main config files, processed with distribution specific substitutions from '../src/config' exclude 'log4j2.properties' // this is handled separately below - MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, jdk)) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, java)) } from project(':distribution').buildLog4jConfig from project(':distribution').buildConfig } } - binFiles = { distributionType, jdk -> + binFiles = { distributionType, java -> copySpec { // non-windows files, for all distributions with copySpec { @@ -398,7 +414,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { exclude '*.exe' exclude '*.bat' eachFile { it.setMode(0755) } - MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, jdk)) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, java)) } // windows files, only for zip if (distributionType == 'zip') { @@ -406,7 +422,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { from '../src/bin' include '*.bat' filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) - MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, jdk)) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, java)) } with copySpec { from '../src/bin' @@ -424,12 +440,12 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } - noticeFile = { jdk -> + noticeFile = { java -> copySpec { if (project.name == 'integ-test-zip') { from buildServerNoticeTaskProvider } else { - if (jdk) { + if (java != JavaPackageType.NONE) { from buildNoticeTaskProvider } else { from buildNoJdkNoticeTaskProvider @@ -446,7 +462,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { if ("arm64".equals(architecture)) { architecture = "aarch64" } - from project.jdks."bundled_${platform}_${architecture}" + from project.jdks."bundled_jdk_${platform}_${architecture}" exclude "demo/**" /* * The Contents/MacOS directory interferes with notarization, and is unused by our distribution, so we exclude @@ -465,6 +481,31 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } } } + + jreFiles = { Project project, String platform, String architecture -> + return copySpec { + /* + * Jdk uses aarch64 from ARM. Translating from arm64 to aarch64 which Jdk understands. + */ + if ("arm64".equals(architecture)) { + architecture = "aarch64" + } + from project.jres."bundled_jre_${platform}_${architecture}" + exclude "demo/**" + /* + * The Contents/MacOS directory interferes with notarization, and is unused by our distribution, so we exclude + * it from the build. 
+ */ + if ("darwin".equals(platform)) { + exclude "Contents/MacOS" + } + eachFile { FileCopyDetails details -> + if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') { + details.mode = 0755 + } + } + } + } } } @@ -583,7 +624,7 @@ subprojects { ], 'opensearch.bundled_jdk': [ - 'def': jdk ? 'true' : 'false' + 'def': jdk != JavaPackageType.NONE ? true : false ], 'license.name': [ diff --git a/distribution/src/bin/opensearch-env b/distribution/src/bin/opensearch-env index f94824576b122..cd7a0b2b4520b 100644 --- a/distribution/src/bin/opensearch-env +++ b/distribution/src/bin/opensearch-env @@ -46,7 +46,7 @@ fi # now set the classpath OPENSEARCH_CLASSPATH="$OPENSEARCH_HOME/lib/*" -# now set the path to java: OPENSEARCH_JAVA_HOME -> JAVA_HOME -> bundled JDK +# now set the path to java: OPENSEARCH_JAVA_HOME -> JAVA_HOME -> bundled JRE -> bundled JDK if [ ! -z "$OPENSEARCH_JAVA_HOME" ]; then JAVA="$OPENSEARCH_JAVA_HOME/bin/java" JAVA_TYPE="OPENSEARCH_JAVA_HOME" @@ -57,13 +57,18 @@ else if [ $OS = "darwin" ]; then # macOS bundled Java JAVA="$OPENSEARCH_HOME/jdk.app/Contents/Home/bin/java" + JAVA_TYPE="bundled jdk" elif [ $OS = "freebsd" ]; then # using FreeBSD default java from ports if JAVA_HOME is not set JAVA="/usr/local/bin/java" + JAVA_TYPE="bundled jdk" + elif [ -d "$OPENSEARCH_HOME/jre" ]; then + JAVA="$OPENSEARCH_HOME/jre/bin/java" + JAVA_TYPE="bundled jre" else JAVA="$OPENSEARCH_HOME/jdk/bin/java" + JAVA_TYPE="bundled jdk" fi - JAVA_TYPE="bundled jdk" fi if [ ! -x "$JAVA" ]; then diff --git a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java index 1acdf00ca6af0..4ce7013e1052f 100644 --- a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java +++ b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java @@ -171,7 +171,7 @@ public class JvmInfo implements ReportingService.Info { } final boolean bundledJdk = Booleans.parseBoolean(System.getProperty("opensearch.bundled_jdk", Boolean.FALSE.toString())); - final Boolean usingBundledJdk = bundledJdk ? usingBundledJdk() : null; + final Boolean usingBundledJdkOrJre = bundledJdk ? usingBundledJdkOrJre() : null; INSTANCE = new JvmInfo( JvmPid.getPid(), @@ -180,7 +180,7 @@ public class JvmInfo implements ReportingService.Info { runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(), bundledJdk, - usingBundledJdk, + usingBundledJdkOrJre, runtimeMXBean.getStartTime(), configuredInitialHeapSize, configuredMaxHeapSize, @@ -201,7 +201,7 @@ public class JvmInfo implements ReportingService.Info { } @SuppressForbidden(reason = "PathUtils#get") - private static boolean usingBundledJdk() { + private static boolean usingBundledJdkOrJre() { /* * We are using the bundled JDK if java.home is the jdk sub-directory of our working directory. This is because we always set * the working directory of Elasticsearch to home, and the bundled JDK is in the jdk sub-directory there. 
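(Editor's note) The hunk that follows extends this check so that a bundled JRE under jre/ is recognized in addition to a bundled JDK under jdk/; the macOS branch keeps the jdk.app/Contents/Home layout. A rough, standalone restatement of the resulting logic, kept deliberately simple (the class and method names are illustrative only, and the real code uses OpenSearch's PathUtils and Lucene's Constants.MAC_OS_X rather than plain java.nio.file):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Illustrative only: simplified restatement of the bundled-runtime check.
    final class BundledRuntimeCheck {

        static boolean usingBundledJdkOrJre(String javaHome, String workingDir, boolean macOsX) {
            Path home = Paths.get(javaHome).toAbsolutePath();
            Path dir = Paths.get(workingDir);
            if (macOsX) {
                // The macOS archive still ships the runtime as jdk.app.
                return home.equals(dir.resolve("jdk.app/Contents/Home").toAbsolutePath());
            }
            // Accept either the new jre/ directory or the existing jdk/ directory.
            return home.equals(dir.resolve("jre").toAbsolutePath())
                || home.equals(dir.resolve("jdk").toAbsolutePath());
        }
    }
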
@@ -211,7 +211,8 @@ private static boolean usingBundledJdk() { if (Constants.MAC_OS_X) { return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk.app/Contents/Home").toAbsolutePath()); } else { - return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk").toAbsolutePath()); + return PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jre").toAbsolutePath()) + || PathUtils.get(javaHome).equals(PathUtils.get(userDir).resolve("jdk").toAbsolutePath()); } } diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index d768165451a5a..fe630dfe27e3a 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -428,7 +428,7 @@ protected Node( Constants.JVM_VERSION ); if (jvmInfo.getBundledJdk()) { - logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); + logger.info("JVM home [{}], using bundled JDK/JRE [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); } else { logger.info("JVM home [{}]", System.getProperty("java.home")); deprecationLogger.deprecate( diff --git a/settings.gradle b/settings.gradle index b7d47cd9b745e..94f5de26f4090 100644 --- a/settings.gradle +++ b/settings.gradle @@ -53,6 +53,7 @@ List projects = [ 'distribution:archives:no-jdk-linux-ppc64le-tar', 'distribution:archives:linux-tar', 'distribution:archives:no-jdk-linux-tar', + 'distribution:archives:jre-linux-tar', 'distribution:docker', 'distribution:docker:docker-arm64-build-context', 'distribution:docker:docker-arm64-export', From b282005191bca3e3dd5b5ad5eb347dcaf296e4e5 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Fri, 28 Jul 2023 13:48:19 -0500 Subject: [PATCH 21/75] Replace deprecated Query#rewrite(IndexReader with rewrite(IndexSearcher (#8967) * Replace deprecated Query#rewrite(IndexReader with rewrite(IndexSearcher Query#rewrite(IndexReader reader) is deprecated in Lucene 9.7 and removed in Lucene 10 in favor of Query#rewrite(IndexSearcher searcher). The latter provides LeafCollector hooks to optimize for concurrent queries. This commit cuts over usage of rewrite(IndexReader) to rewrite(IndexSearcher) for Lucene 10 compatibility and upstream query optimizations. 
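To make the pattern concrete, here is a minimal, self-contained sketch of what this migration looks like for a hypothetical delegating query, written against the Lucene 9.7 API. DelegatingQuery is an invented name used only for illustration and is not part of this patch; the before/after shape mirrors the changes below.

    import java.io.IOException;
    import java.util.Objects;

    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.QueryVisitor;

    // Hypothetical wrapper query used only to illustrate the migration.
    final class DelegatingQuery extends Query {

        private final Query in;

        DelegatingQuery(Query in) {
            this.in = in;
        }

        // Before (deprecated in Lucene 9.7, removed in Lucene 10):
        //
        //     @Override
        //     public Query rewrite(IndexReader reader) throws IOException {
        //         Query rewritten = in.rewrite(reader);
        //         return rewritten != in ? new DelegatingQuery(rewritten) : this;
        //     }
        //
        // After, following the shape used throughout this patch:
        @Override
        public Query rewrite(IndexSearcher searcher) throws IOException {
            Query rewritten = in.rewrite(searcher);
            if (rewritten != in) {
                return new DelegatingQuery(rewritten);
            }
            // Queries that still need reader-level information can call
            // searcher.getIndexReader(), as several rewrites below do.
            return super.rewrite(searcher);
        }

        @Override
        public void visit(QueryVisitor visitor) {
            in.visit(visitor.getSubVisitor(BooleanClause.Occur.MUST, this));
        }

        @Override
        public String toString(String field) {
            return "delegating(" + in.toString(field) + ")";
        }

        @Override
        public boolean equals(Object obj) {
            return sameClassAs(obj) && Objects.equals(in, ((DelegatingQuery) obj).in);
        }

        @Override
        public int hashCode() {
            return 31 * classHash() + Objects.hashCode(in);
        }
    }

Only Query subclasses that overrode rewrite(IndexReader) need this kind of change; in Lucene 9.7 both entry points still exist, so existing callers keep working during the transition.
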
Signed-off-by: Nicholas Walter Knize * remove stray collector.finish Signed-off-by: Nicholas Walter Knize --------- Signed-off-by: Nicholas Walter Knize --- .../join/query/HasChildQueryBuilder.java | 5 +++-- .../org/opensearch/percolator/PercolateQuery.java | 5 ++--- .../opensearch/percolator/CandidateQueryTests.java | 2 +- .../apache/lucene/queries/BlendedTermQuery.java | 6 ++++-- .../common/lucene/search/MoreLikeThisQuery.java | 8 ++++---- .../lucene/search/MultiPhrasePrefixQuery.java | 7 ++++--- .../lucene/search/function/FunctionScoreQuery.java | 10 +++++----- .../lucene/search/function/ScriptScoreQuery.java | 7 +++---- .../SortedUnsignedLongDocValuesRangeQuery.java | 5 ++--- .../index/query/DateRangeIncludingNowQuery.java | 4 ++-- .../search/OpenSearchToParentBlockJoinQuery.java | 7 +++---- .../org/opensearch/lucene/queries/MinDocQuery.java | 3 ++- .../suggest/completion/CompletionSuggester.java | 2 +- .../deps/lucene/VectorHighlighterTests.java | 2 +- .../index/mapper/DateFieldTypeTests.java | 6 +++++- .../query/SpanMultiTermQueryBuilderTests.java | 5 +++-- .../OpenSearchToParentBlockJoinQueryTests.java | 3 ++- .../lucene/queries/MinDocQueryTests.java | 5 +++-- .../lucene/queries/SpanMatchNoDocsQueryTests.java | 14 +++----------- .../search/internal/ContextIndexSearcherTests.java | 7 +++---- 20 files changed, 56 insertions(+), 57 deletions(-) diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java index 4f26a15031dd4..b38bc5d7b45fc 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java @@ -416,11 +416,12 @@ public void visit(QueryVisitor visitor) { } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query rewritten = super.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query rewritten = super.rewrite(searcher); if (rewritten != this) { return rewritten; } + IndexReader reader = searcher.getIndexReader(); if (reader instanceof DirectoryReader) { IndexSearcher indexSearcher = new IndexSearcher(reader); indexSearcher.setQueryCache(null); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java index 86d30f009e709..2702a02a7a951 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java @@ -32,7 +32,6 @@ package org.opensearch.percolator; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; @@ -89,8 +88,8 @@ final class PercolateQuery extends Query implements Accountable { } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query rewritten = candidateMatchesQuery.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query rewritten = candidateMatchesQuery.rewrite(searcher); if (rewritten != candidateMatchesQuery) { return new PercolateQuery( name, diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 
3b953fcfe65e1..1040d014483e1 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -1275,7 +1275,7 @@ private CustomQuery(Term term) { } @Override - public Query rewrite(IndexReader reader) throws IOException { + public Query rewrite(IndexSearcher searcher) throws IOException { return new TermQuery(term); } diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 3a36a6ff103e0..9d276cfe283eb 100644 --- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -42,6 +42,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.TermQuery; @@ -93,11 +94,12 @@ public BlendedTermQuery(Term[] terms, float[] boosts) { } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query rewritten = super.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query rewritten = super.rewrite(searcher); if (rewritten != this) { return rewritten; } + IndexReader reader = searcher.getIndexReader(); IndexReaderContext context = reader.getContext(); TermStates[] ctx = new TermStates[terms.length]; int[] docFreqs = new int[ctx.length]; diff --git a/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java index b4440b85e0037..ef07f6ea8052c 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java @@ -36,12 +36,12 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Fields; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.similarities.ClassicSimilarity; @@ -144,12 +144,12 @@ public boolean equals(Object obj) { } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query rewritten = super.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query rewritten = super.rewrite(searcher); if (rewritten != this) { return rewritten; } - XMoreLikeThis mlt = new XMoreLikeThis(reader, similarity == null ? new ClassicSimilarity() : similarity); + XMoreLikeThis mlt = new XMoreLikeThis(searcher.getIndexReader(), similarity == null ? 
new ClassicSimilarity() : similarity); mlt.setFieldNames(moreLikeFields); mlt.setAnalyzer(analyzer); diff --git a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java index 98ebb34fc040e..cc0468efb243e 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -39,6 +39,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.Query; @@ -159,8 +160,8 @@ public int[] getPositions() { } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query rewritten = super.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query rewritten = super.rewrite(searcher); if (rewritten != this) { return rewritten; } @@ -177,7 +178,7 @@ public Query rewrite(IndexReader reader) throws IOException { int position = positions.get(sizeMinus1); Set terms = new HashSet<>(); for (Term term : suffixTerms) { - getPrefixTerms(terms, term, reader); + getPrefixTerms(terms, term, searcher.getIndexReader()); if (terms.size() > maxExpansions) { break; } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java index ebd155b8a5679..db92d4f1cba96 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java @@ -128,7 +128,7 @@ protected int doHashCode() { @Override protected ScoreFunction rewrite(IndexReader reader) throws IOException { - Query newFilter = filter.rewrite(reader); + Query newFilter = filter.rewrite(new IndexSearcher(reader)); if (newFilter == filter) { return this; } @@ -322,16 +322,16 @@ public void visit(QueryVisitor visitor) { } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query rewritten = super.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query rewritten = super.rewrite(searcher); if (rewritten != this) { return rewritten; } - Query newQ = subQuery.rewrite(reader); + Query newQ = subQuery.rewrite(searcher); ScoreFunction[] newFunctions = new ScoreFunction[functions.length]; boolean needsRewrite = (newQ != subQuery); for (int i = 0; i < functions.length; i++) { - newFunctions[i] = functions[i].rewrite(reader); + newFunctions[i] = functions[i].rewrite(searcher.getIndexReader()); needsRewrite |= (newFunctions[i] != functions[i]); } if (needsRewrite) { diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java index 8bf5fc0f89d31..07084a6f690e4 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java @@ -32,7 +32,6 @@ package org.opensearch.common.lucene.search.function; -import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.DocIdSetIterator; @@ -105,12 +104,12 @@ public ScriptScoreQuery( } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query newQ = subQuery.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query newQ = subQuery.rewrite(searcher); if (newQ != subQuery) { return new ScriptScoreQuery(newQ, queryName, script, scriptBuilder, minScore, indexName, shardId, indexVersion); } - return super.rewrite(reader); + return super.rewrite(searcher); } @Override diff --git a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java index 04dd669d60883..234c67cc637f1 100644 --- a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java +++ b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java @@ -10,7 +10,6 @@ import org.apache.lucene.index.DocValues; import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; @@ -87,12 +86,12 @@ public String toString(String field) { } @Override - public Query rewrite(IndexReader reader) throws IOException { + public Query rewrite(IndexSearcher searcher) throws IOException { if (Long.compareUnsigned(lowerValue, Numbers.MIN_UNSIGNED_LONG_VALUE_AS_LONG) == 0 && Long.compareUnsigned(upperValue, Numbers.MAX_UNSIGNED_LONG_VALUE_AS_LONG) == 0) { return new FieldExistsQuery(field); } - return super.rewrite(reader); + return super.rewrite(searcher); } abstract SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException; diff --git a/server/src/main/java/org/opensearch/index/query/DateRangeIncludingNowQuery.java b/server/src/main/java/org/opensearch/index/query/DateRangeIncludingNowQuery.java index 2a7dbf314103d..001d77facb829 100644 --- a/server/src/main/java/org/opensearch/index/query/DateRangeIncludingNowQuery.java +++ b/server/src/main/java/org/opensearch/index/query/DateRangeIncludingNowQuery.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; @@ -60,7 +60,7 @@ public Query getQuery() { } @Override - public Query rewrite(IndexReader reader) throws IOException { + public Query rewrite(IndexSearcher searcher) throws IOException { return in; } diff --git a/server/src/main/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQuery.java b/server/src/main/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQuery.java index 770301629d964..cd28d8b6ef04e 100644 --- a/server/src/main/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQuery.java +++ b/server/src/main/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQuery.java @@ -32,7 +32,6 @@ package org.opensearch.index.search; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -82,8 +81,8 @@ public ScoreMode getScoreMode() { } @Override - public Query 
rewrite(IndexReader reader) throws IOException { - Query innerRewrite = query.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query innerRewrite = query.rewrite(searcher); if (innerRewrite != query) { // Right now ToParentBlockJoinQuery always rewrites to a ToParentBlockJoinQuery // so the else block will never be used. It is useful in the case that @@ -97,7 +96,7 @@ public Query rewrite(IndexReader reader) throws IOException { return innerRewrite; } } - return super.rewrite(reader); + return super.rewrite(searcher); } @Override diff --git a/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java index 2af0237e800c0..5c904d8a7770d 100644 --- a/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java @@ -86,7 +86,8 @@ public boolean equals(Object obj) { } @Override - public Query rewrite(IndexReader reader) throws IOException { + public Query rewrite(IndexSearcher searcher) throws IOException { + final IndexReader reader = searcher.getIndexReader(); if (Objects.equals(reader.getContext().id(), readerId) == false) { return new MinDocQuery(minDoc, reader.getContext().id()); } diff --git a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java index df18e874be9c9..fabb67d7fb841 100644 --- a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggester.java @@ -104,7 +104,7 @@ protected Suggest.Suggestion query.rewrite(reader)); + IndexSearcher searcher = new IndexSearcher(reader); + Query query = queryBuilder.toQuery(createShardContext(searcher)); + RuntimeException exc = expectThrows(RuntimeException.class, () -> query.rewrite(searcher)); assertThat(exc.getMessage(), containsString("maxClauseCount")); } finally { BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount); diff --git a/server/src/test/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQueryTests.java b/server/src/test/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQueryTests.java index 39160b1ac49bc..45172c946ab36 100644 --- a/server/src/test/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQueryTests.java +++ b/server/src/test/java/org/opensearch/index/search/OpenSearchToParentBlockJoinQueryTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -112,7 +113,7 @@ public void testRewrite() throws IOException { ScoreMode.Avg, "nested" ); - Query rewritten = q.rewrite(new MultiReader()); + Query rewritten = q.rewrite(new IndexSearcher(new MultiReader())); assertEquals(expected, rewritten); } } diff --git a/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java index a75a1d5b2fdb7..701ba5a756b40 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java @@ -61,10 +61,11 @@ public void testBasics() { public void testRewrite() throws Exception { 
IndexReader reader = new MultiReader(); + IndexSearcher searcher = new IndexSearcher(reader); MinDocQuery query = new MinDocQuery(42); - Query rewritten = query.rewrite(reader); + Query rewritten = query.rewrite(searcher); QueryUtils.checkUnequal(query, rewritten); - Query rewritten2 = rewritten.rewrite(reader); + Query rewritten2 = rewritten.rewrite(searcher); assertSame(rewritten, rewritten2); } diff --git a/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java index 727377c9a5d53..81f4290b6588e 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java @@ -56,19 +56,11 @@ public class SpanMatchNoDocsQueryTests extends OpenSearchTestCase { public void testSimple() throws Exception { - Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig()); - IndexReader ir = DirectoryReader.open(iw); - SpanMatchNoDocsQuery query = new SpanMatchNoDocsQuery("field", "a good reason"); assertEquals(query.toString(), "SpanMatchNoDocsQuery(\"a good reason\")"); - Query rewrite = query.rewrite(ir); + Query rewrite = query.rewrite((IndexSearcher) null); assertTrue(rewrite instanceof SpanMatchNoDocsQuery); assertEquals(rewrite.toString(), "SpanMatchNoDocsQuery(\"a good reason\")"); - - iw.close(); - ir.close(); - dir.close(); } public void testQuery() throws Exception { @@ -101,7 +93,7 @@ public void testQuery() throws Exception { assertEquals(searcher.count(orQuery), 1); hits = searcher.search(orQuery, 1000).scoreDocs; assertEquals(1, hits.length); - Query rewrite = orQuery.rewrite(ir); + Query rewrite = orQuery.rewrite(searcher); assertEquals(rewrite, orQuery); SpanNearQuery nearQuery = new SpanNearQuery( @@ -112,7 +104,7 @@ public void testQuery() throws Exception { assertEquals(searcher.count(nearQuery), 0); hits = searcher.search(nearQuery, 1000).scoreDocs; assertEquals(0, hits.length); - rewrite = nearQuery.rewrite(ir); + rewrite = nearQuery.rewrite(searcher); assertEquals(rewrite, nearQuery); iw.close(); diff --git a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java index f3907355ac6ec..823fc6b463906 100644 --- a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java @@ -40,7 +40,6 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReader; @@ -466,12 +465,12 @@ public String toString(String field) { } @Override - public Query rewrite(IndexReader reader) throws IOException { - Query queryRewritten = query.rewrite(reader); + public Query rewrite(IndexSearcher searcher) throws IOException { + Query queryRewritten = query.rewrite(searcher); if (query != queryRewritten) { return new CreateScorerOnceQuery(queryRewritten); } - return super.rewrite(reader); + return super.rewrite(searcher); } @Override From 36c37ace30cdf9d2f92176a3aa38b60865c33056 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Fri, 28 Jul 2023 14:13:02 -0500 Subject: [PATCH 
22/75] [Refactor] XContentFactory contentType introspection to MediaTypeParserRegistry (#8826) XContentFactory is tightly coupled to concrete XContentType. This commit builds on the MediaType abstractions, specifically MediaTypeParserRegistry, to decouple contentType introspection (e.g., determining type from byte streams) from concrete XContentTypes. This enables downstream extensions (e.g., serverless or cloud native implementations) to register their own custom XContentType and define the serialization format for proper content introspection. This also removes the tight coupling of :libs:opensearch-x-content with abstract interface contracts further enabling modularity. Signed-off-by: Nicholas Walter Knize --- .../client/indices/CreateIndexRequest.java | 2 +- .../indices/PutIndexTemplateRequest.java | 3 +- .../client/indices/PutMappingRequest.java | 3 +- .../client/AbstractRequestTestCase.java | 3 +- .../client/AbstractResponseTestCase.java | 3 +- .../client/core/GetSourceResponseTests.java | 4 +- .../indices/CloseIndexResponseTests.java | 5 +- .../GetIndexTemplatesResponseTests.java | 4 +- .../opensearch/core/xcontent/MediaType.java | 10 + .../core/xcontent/MediaTypeRegistry.java | 215 +++++++++++++ .../common/xcontent/XContentFactory.java | 282 +----------------- .../common/xcontent/XContentType.java | 105 +++++++ .../xcontent/json/JsonXContentGenerator.java | 6 +- .../ingest/useragent/UserAgentParser.java | 3 +- .../mustache/SearchTemplateResponse.java | 6 +- .../TransportSearchTemplateAction.java | 4 +- .../SearchTemplateRequestXContentTests.java | 10 +- .../mustache/SearchTemplateResponseTests.java | 9 +- .../percolator/PercolateQueryBuilder.java | 22 +- .../DiscountedCumulativeGainTests.java | 4 +- .../rankeval/ExpectedReciprocalRankTests.java | 4 +- .../rankeval/MeanReciprocalRankTests.java | 4 +- .../index/rankeval/PrecisionAtKTests.java | 4 +- .../index/rankeval/RankEvalResponseTests.java | 4 +- .../index/rankeval/RatedDocumentTests.java | 4 +- .../index/rankeval/RatedRequestsTests.java | 4 +- .../index/rankeval/RecallAtKTests.java | 4 +- .../AbstractBulkByQueryRestHandler.java | 4 +- .../aggregations/FiltersAggsRewriteIT.java | 4 +- .../create/CreateSnapshotRequest.java | 3 +- .../action/admin/indices/alias/Alias.java | 3 +- .../indices/alias/IndicesAliasesRequest.java | 3 +- .../mapping/put/PutMappingRequest.java | 3 +- .../opensearch/action/index/IndexRequest.java | 6 +- .../termvectors/TermVectorsRequest.java | 4 +- .../action/update/UpdateRequest.java | 6 +- .../cluster/metadata/AliasMetadata.java | 3 +- .../cluster/metadata/AliasValidator.java | 11 +- .../cluster/metadata/IndexMetadata.java | 4 +- .../common/compress/CompressorFactory.java | 8 +- .../opensearch/common/settings/Settings.java | 8 +- .../common/xcontent/XContentHelper.java | 53 +--- .../gateway/MetadataStateFormat.java | 10 +- .../gateway/PersistedClusterStateService.java | 12 +- .../opensearch/index/get/ShardGetService.java | 8 +- .../index/mapper/SourceFieldMapper.java | 6 +- .../index/query/WrapperQueryBuilder.java | 7 +- .../functionscore/DecayFunctionBuilder.java | 5 +- .../reindex/ClientScrollableHitSource.java | 4 +- .../index/reindex/ReindexRequest.java | 4 +- .../opensearch/index/shard/IndexShard.java | 4 +- .../opensearch/indices/IndicesService.java | 4 +- .../blobstore/ChecksumBlobStoreFormat.java | 10 +- .../opensearch/rest/AbstractRestChannel.java | 8 +- .../action/document/RestGetSourceAction.java | 4 +- .../CompletionSuggestionBuilder.java | 6 +- .../suggest/phrase/PhraseSuggester.java | 5 +- 
.../org/opensearch/tasks/RawTaskStatus.java | 4 +- .../java/org/opensearch/tasks/TaskResult.java | 4 +- .../opensearch/tasks/TaskResultsService.java | 4 +- .../node/tasks/TransportTasksActionTests.java | 4 +- .../reroute/ClusterRerouteRequestTests.java | 4 +- .../indices/alias/AliasActionsTests.java | 4 +- .../create/CreateIndexRequestTests.java | 8 +- .../rollover/RolloverRequestTests.java | 3 +- .../put/PutIndexTemplateRequestTests.java | 5 +- .../action/bulk/BulkRequestTests.java | 16 +- .../action/explain/ExplainResponseTests.java | 4 +- .../MergedFieldCapabilitiesResponseTests.java | 4 +- .../action/get/MultiGetRequestTests.java | 2 +- .../action/get/MultiGetResponseTests.java | 3 +- .../action/support/IndicesOptionsTests.java | 8 +- .../AbstractShapeBuilderTestCase.java | 4 +- .../common/xcontent/BaseXContentTestCase.java | 3 +- .../common/xcontent/XContentFactoryTests.java | 33 +- .../builder/XContentBuilderTests.java | 72 ++--- .../common/xcontent/cbor/JsonVsCborTests.java | 5 +- .../xcontent/smile/JsonVsSmileTests.java | 5 +- .../AbstractXContentFilteringTestCase.java | 3 +- .../index/mapper/SourceFieldMapperTests.java | 6 +- .../index/query/BoolQueryBuilderTests.java | 4 +- .../index/query/InnerHitBuilderTests.java | 4 +- .../snapshots/blobstore/FileInfoTests.java | 6 +- ...oteStoreShardShallowCopySnapshotTests.java | 4 +- .../ingest/IngestMetadataTests.java | 4 +- .../PersistentTasksCustomMetadataTests.java | 3 +- .../indices/RestGetAliasesActionTests.java | 16 +- .../org/opensearch/script/ScriptTests.java | 5 +- .../opensearch/script/StoredScriptTests.java | 30 +- .../search/NestedIdentityTests.java | 4 +- .../AggregatorFactoriesTests.java | 4 +- .../aggregations/bucket/FiltersTests.java | 12 +- .../support/IncludeExcludeTests.java | 6 +- .../builder/SearchSourceBuilderTests.java | 10 +- .../highlight/HighlightBuilderTests.java | 6 +- .../highlight/HighlightFieldTests.java | 4 +- .../internal/ShardSearchRequestTests.java | 3 +- .../rescore/QueryRescorerBuilderTests.java | 4 +- .../searchafter/SearchAfterBuilderTests.java | 5 +- .../search/slice/SliceBuilderTests.java | 4 +- .../search/sort/AbstractSortTestCase.java | 4 +- .../search/sort/NestedSortBuilderTests.java | 4 +- .../AbstractSuggestionBuilderTestCase.java | 4 +- .../search/suggest/SuggestBuilderTests.java | 4 +- .../phrase/DirectCandidateGeneratorTests.java | 4 +- .../phrase/SmoothingModelTestCase.java | 4 +- .../org/opensearch/tasks/TaskResultTests.java | 4 +- .../index/RandomCreateIndexGenerator.java | 4 +- .../index/engine/TranslogHandler.java | 10 +- .../search/RandomSearchRequestGenerator.java | 2 +- .../aggregations/BaseAggregationTestCase.java | 6 +- .../BasePipelineAggregationTestCase.java | 4 +- .../test/AbstractXContentTestCase.java | 5 +- .../opensearch/test/OpenSearchTestCase.java | 4 +- .../org/opensearch/test/RandomObjects.java | 4 +- .../opensearch/test/XContentTestUtils.java | 3 +- .../yaml/ClientYamlTestExecutionContext.java | 4 +- .../test/rest/yaml/ObjectPathTests.java | 7 +- 118 files changed, 711 insertions(+), 667 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java index 16915b32c16fe..cffed98fce3aa 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java @@ -187,7 +187,7 @@ public CreateIndexRequest 
mapping(XContentBuilder source) { */ public CreateIndexRequest mapping(Map source) { try { - XContentBuilder builder = XContentFactory.contentBuilder(MediaTypeRegistry.getDefaultMediaType()); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(MediaTypeRegistry.getDefaultMediaType()); builder.map(source); return mapping(BytesReference.bytes(builder), builder.contentType()); } catch (IOException e) { diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java index fed958955af99..09dbbd63b9479 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java @@ -48,6 +48,7 @@ import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; @@ -267,7 +268,7 @@ public PutIndexTemplateRequest mapping(Map source) { private PutIndexTemplateRequest internalMapping(Map source) { try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.map(source); MediaType mediaType = builder.contentType(); Objects.requireNonNull(mediaType); diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java index 721d6094f7502..6d7e95d191ba6 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutMappingRequest.java @@ -38,7 +38,6 @@ import org.opensearch.client.TimedRequest; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; @@ -111,7 +110,7 @@ public MediaType mediaType() { */ public PutMappingRequest source(Map mappingSource) { try { - XContentBuilder builder = XContentFactory.contentBuilder(MediaTypeRegistry.getDefaultMediaType()); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(MediaTypeRegistry.getDefaultMediaType()); builder.map(mappingSource); return source(builder); } catch (IOException e) { diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java index 707f4246009aa..e9b297d21bcba 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractRequestTestCase.java @@ -36,7 +36,6 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContent; -import org.opensearch.common.xcontent.XContentFactory; import 
org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; @@ -60,7 +59,7 @@ public final void testFromXContent() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference bytes = toShuffledXContent(clientTestInstance, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); - final XContent xContent = XContentFactory.xContent(xContentType); + final XContent xContent = xContentType.xContent(); final XContentParser parser = xContent.createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, bytes.streamInput()); final S serverInstance = doParseToServerInstance(parser); assertInstances(serverInstance, clientTestInstance); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java index 5f7df0cd5860c..b5ee31c87b8d4 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/AbstractResponseTestCase.java @@ -36,7 +36,6 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContent; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; @@ -59,7 +58,7 @@ public final void testFromXContent() throws IOException { final S serverTestInstance = createServerTestInstance(xContentType); final BytesReference bytes = toShuffledXContent(serverTestInstance, xContentType, getParams(), randomBoolean()); - final XContent xContent = XContentFactory.xContent(xContentType); + final XContent xContent = xContentType.xContent(); final XContentParser parser = xContent.createParser( NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/core/GetSourceResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/core/GetSourceResponseTests.java index 03d6e6720ffed..376eb3c5e407c 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/core/GetSourceResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/core/GetSourceResponseTests.java @@ -35,9 +35,9 @@ import org.opensearch.client.AbstractResponseTestCase; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -61,7 +61,7 @@ static class SourceOnlyResponse implements ToXContentObject { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { // this implementation copied from RestGetSourceAction.RestGetSourceResponseListener::buildResponse try (InputStream stream = source.streamInput()) { - builder.rawValue(stream, XContentHelper.xContentType(source)); + builder.rawValue(stream, MediaTypeRegistry.xContentType(source)); } return builder; } diff --git 
a/client/rest-high-level/src/test/java/org/opensearch/client/indices/CloseIndexResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/CloseIndexResponseTests.java index b83bdab899a41..7babb339e176a 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/CloseIndexResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/CloseIndexResponseTests.java @@ -39,7 +39,6 @@ import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContent; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.index.Index; @@ -194,7 +193,7 @@ public final void testBwcFromXContent() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference bytes = toShuffledXContent(expected, xContentType, getParams(), randomBoolean()); - final XContent xContent = XContentFactory.xContent(xContentType); + final XContent xContent = xContentType.xContent(); final XContentParser parser = xContent.createParser( NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, @@ -215,7 +214,7 @@ public final void testBwcFromXContent() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference bytes = toShuffledXContent(expected, xContentType, getParams(), randomBoolean()); - final XContent xContent = XContentFactory.xContent(xContentType); + final XContent xContent = xContentType.xContent(); final XContentParser parser = xContent.createParser( NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java index 6f924898096a0..8c83791d94d42 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java @@ -39,10 +39,10 @@ import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -262,7 +262,7 @@ private static AliasMetadata randomAliasMetadata(String name) { } static XContentBuilder randomMapping(String type, XContentType xContentType) throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(xContentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType); builder.startObject().startObject(type); randomMappingFields(builder, true); diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java index 7193cd3bd97bb..8e3c115c7ba58 100644 --- 
a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaType.java @@ -34,6 +34,8 @@ import org.opensearch.core.common.io.stream.Writeable; +import java.io.IOException; +import java.io.OutputStream; import java.util.Locale; /** @@ -69,12 +71,20 @@ default String typeWithSubtype() { XContent xContent(); + boolean detectedXContent(final byte[] bytes, int offset, int length); + + boolean detectedXContent(final CharSequence content, final int length); + default String mediaType() { return mediaTypeWithoutParameters(); } String mediaTypeWithoutParameters(); + XContentBuilder contentBuilder() throws IOException; + + XContentBuilder contentBuilder(final OutputStream os) throws IOException; + /** * Accepts a format string, which is most of the time is equivalent to {@link MediaType#subtype()} * and attempts to match the value to an {@link MediaType}. diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java index b81325f6c7c74..064f154602caf 100644 --- a/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/MediaTypeRegistry.java @@ -32,8 +32,15 @@ package org.opensearch.core.xcontent; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.spi.MediaTypeProvider; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.UncheckedIOException; + import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -54,6 +61,7 @@ public final class MediaTypeRegistry { // Default mediaType singleton private static MediaType DEFAULT_MEDIA_TYPE; + public static final int GUESS_HEADER_LENGTH = 20; // JSON is a core type, so we create a static instance for implementations that require JSON format (e.g., tests) // todo we should explore moving the concrete JSON implementation from the xcontent library to core @@ -118,6 +126,213 @@ public static MediaType fromFormat(String format) { return formatToMediaType.get(format.toLowerCase(Locale.ROOT)); } + /** + * Returns a binary content builder for the provided content type. + */ + public static XContentBuilder contentBuilder(MediaType type) throws IOException { + for (var mediaType : formatToMediaType.values()) { + if (type == mediaType) { + return type.contentBuilder(); + } + } + throw new IllegalArgumentException("No matching content type for " + type); + } + + public static XContentBuilder contentBuilder(MediaType type, OutputStream outputStream) throws IOException { + for (var mediaType : formatToMediaType.values()) { + if (type == mediaType) { + return type.contentBuilder(outputStream); + } + } + throw new IllegalArgumentException("No matching content type for " + type); + } + + /** + * Guesses the content (type) based on the provided char sequence and returns the corresponding {@link XContent} + * + * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. + * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. + * This method is deprecated to prevent usages of it from spreading further without specific reasons. 
+ */ + @Deprecated + public static MediaType xContent(final byte[] data, int offset, int length) { + MediaType type = mediaTypeFromBytes(data, offset, length); + if (type == null) { + throw new XContentParseException("Failed to derive xcontent"); + } + return type; + } + + /** + * Guesses the content type based on the provided bytes and returns the corresponding {@link XContent} + * + * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. + * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. + * This method is deprecated to prevent usages of it from spreading further without specific reasons. + */ + @Deprecated + public static MediaType xContent(byte[] data) { + return xContent(data, 0, data.length); + } + + /** + * Guesses the content (type) based on the provided char sequence and returns the corresponding {@link XContent} + * + * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. + * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. + * This method is deprecated to prevent usages of it from spreading further without specific reasons. + */ + @Deprecated + public static MediaType xContent(CharSequence content) { + MediaType type = xContentType(content); + if (type == null) { + throw new XContentParseException("Failed to derive xcontent"); + } + return type; + } + + /** + * Guesses the content type based on the provided char sequence. + * + * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. + * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. + * This method is deprecated to prevent usages of it from spreading further without specific reasons. + */ + @Deprecated + public static MediaType xContentType(CharSequence content) { + int length = content.length() < GUESS_HEADER_LENGTH ? content.length() : GUESS_HEADER_LENGTH; + if (length == 0) { + return null; + } + for (var mediaType : formatToMediaType.values()) { + if (mediaType.detectedXContent(content, length)) { + return mediaType; + } + } + + // fallback for json + for (int i = 0; i < length; i++) { + char c = content.charAt(i); + if (c == '{') { + return MediaType.fromMediaType("application/json"); + } + if (Character.isWhitespace(c) == false) { + break; + } + } + return null; + } + + /** + * Guesses the content type based on the provided input stream without consuming it. + * + * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. + * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. + * This method is deprecated to prevent usages of it from spreading further without specific reasons. + */ + @Deprecated + public static MediaType xContentType(InputStream si) throws IOException { + /* + * We need to guess the content type. To do this, we look for the first non-whitespace character and then try to guess the content + * type on the GUESS_HEADER_LENGTH bytes that follow. We do this in a way that does not modify the initial read position in the + * underlying input stream. 
This is why the input stream must support mark/reset and why we repeatedly mark the read position and + * reset. + */ + if (si.markSupported() == false) { + throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass()); + } + si.mark(Integer.MAX_VALUE); + try { + // scan until we find the first non-whitespace character or the end of the stream + int current; + do { + current = si.read(); + if (current == -1) { + return null; + } + } while (Character.isWhitespace((char) current)); + // now guess the content type off the next GUESS_HEADER_LENGTH bytes including the current byte + final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH]; + firstBytes[0] = (byte) current; + int read = 1; + while (read < GUESS_HEADER_LENGTH) { + final int r = si.read(firstBytes, read, GUESS_HEADER_LENGTH - read); + if (r == -1) { + break; + } + read += r; + } + return mediaTypeFromBytes(firstBytes, 0, read); + } finally { + si.reset(); + } + + } + + /** + * Guesses the content type based on the provided bytes. + * + * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. + * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. + * This method is deprecated to prevent usages of it from spreading further without specific reasons. + */ + @Deprecated + public static MediaType xContentType(BytesReference bytes) { + if (bytes instanceof BytesArray) { + final BytesArray array = (BytesArray) bytes; + return mediaTypeFromBytes(array.array(), array.offset(), array.length()); + } + try { + final InputStream inputStream = bytes.streamInput(); + assert inputStream.markSupported(); + return xContentType(inputStream); + } catch (IOException e) { + assert false : "Should not happen, we're just reading bytes from memory"; + throw new UncheckedIOException(e); + } + } + + /** + * Guesses the content type based on the provided bytes. + * + * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. + * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. + * This method is deprecated to prevent usages of it from spreading further without specific reasons. 
+ */ + @Deprecated + public static MediaType mediaTypeFromBytes(final byte[] data, int offset, int length) { + int totalLength = data.length; + if (totalLength == 0 || length == 0) { + return null; + } else if ((offset + length) > totalLength) { + return null; + } + for (var mediaType : formatToMediaType.values()) { + if (mediaType.detectedXContent(data, offset, length)) { + return mediaType; + } + } + + // a last chance for JSON + int jsonStart = 0; + // JSON may be preceded by UTF-8 BOM + if (length > 3 && data[offset] == (byte) 0xEF && data[offset + 1] == (byte) 0xBB && data[offset + 2] == (byte) 0xBF) { + jsonStart = 3; + } + + for (int i = jsonStart; i < length; i++) { + byte b = data[offset + i]; + if (b == '{') { + return fromMediaType("application/json"); + } + if (Character.isWhitespace(b) == false) { + break; + } + } + + return null; + } + /** * parsing media type that follows https://tools.ietf.org/html/rfc7231#section-3.1.1.1 * @param headerValue a header value from Accept or Content-Type diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java index 76a2046dd768a..9f423bc9abad3 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentFactory.java @@ -32,19 +32,14 @@ package org.opensearch.common.xcontent; -import com.fasterxml.jackson.dataformat.cbor.CBORConstants; -import com.fasterxml.jackson.dataformat.smile.SmileConstants; -import org.opensearch.common.xcontent.cbor.CborXContent; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.smile.SmileXContent; import org.opensearch.common.xcontent.yaml.YamlXContent; -import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.core.xcontent.XContentParseException; import java.io.IOException; -import java.io.InputStream; import java.io.OutputStream; /** @@ -52,13 +47,11 @@ */ public class XContentFactory { - static final int GUESS_HEADER_LENGTH = 20; - /** * Returns a content builder using JSON format ({@link org.opensearch.common.xcontent.XContentType#JSON}. */ public static XContentBuilder jsonBuilder() throws IOException { - return contentBuilder(XContentType.JSON); + return MediaTypeRegistry.contentBuilder(XContentType.JSON); } /** @@ -72,7 +65,7 @@ public static XContentBuilder jsonBuilder(OutputStream os) throws IOException { * Returns a content builder using SMILE format ({@link org.opensearch.common.xcontent.XContentType#SMILE}. */ public static XContentBuilder smileBuilder() throws IOException { - return contentBuilder(XContentType.SMILE); + return MediaTypeRegistry.contentBuilder(XContentType.SMILE); } /** @@ -86,7 +79,7 @@ public static XContentBuilder smileBuilder(OutputStream os) throws IOException { * Returns a content builder using YAML format ({@link org.opensearch.common.xcontent.XContentType#YAML}. */ public static XContentBuilder yamlBuilder() throws IOException { - return contentBuilder(XContentType.YAML); + return MediaTypeRegistry.contentBuilder(XContentType.YAML); } /** @@ -100,271 +93,6 @@ public static XContentBuilder yamlBuilder(OutputStream os) throws IOException { * Returns a content builder using CBOR format ({@link org.opensearch.common.xcontent.XContentType#CBOR}. 
*/ public static XContentBuilder cborBuilder() throws IOException { - return contentBuilder(XContentType.CBOR); - } - - /** - * Constructs a new cbor builder that will output the result into the provided output stream. - */ - public static XContentBuilder cborBuilder(OutputStream os) throws IOException { - return new XContentBuilder(CborXContent.cborXContent, os); - } - - /** - * Constructs a xcontent builder that will output the result into the provided output stream. - */ - public static XContentBuilder contentBuilder(MediaType type, OutputStream outputStream) throws IOException { - if (type == XContentType.JSON) { - return jsonBuilder(outputStream); - } else if (type == XContentType.SMILE) { - return smileBuilder(outputStream); - } else if (type == XContentType.YAML) { - return yamlBuilder(outputStream); - } else if (type == XContentType.CBOR) { - return cborBuilder(outputStream); - } - throw new IllegalArgumentException("No matching content type for " + type); - } - - /** - * Returns a binary content builder for the provided media type. - */ - public static XContentBuilder contentBuilder(MediaType type) throws IOException { - if (type instanceof XContentType) { - return contentBuilder((XContentType) (type)); - } - throw new IllegalArgumentException("Content type [" + type.getClass().getName() + "] not supported"); - } - - /** - * Returns a binary content builder for the provided content type. - */ - public static XContentBuilder contentBuilder(XContentType type) throws IOException { - if (type == XContentType.JSON) { - return JsonXContent.contentBuilder(); - } else if (type == XContentType.SMILE) { - return SmileXContent.contentBuilder(); - } else if (type == XContentType.YAML) { - return YamlXContent.contentBuilder(); - } else if (type == XContentType.CBOR) { - return CborXContent.contentBuilder(); - } - throw new IllegalArgumentException("No matching content type for " + type); - } - - /** - * Returns the {@link XContent} for the provided content type. - */ - public static XContent xContent(MediaType type) { - if (type == null) { - throw new IllegalArgumentException("Cannot get xcontent for unknown type"); - } - return type.xContent(); - } - - /** - * Guesses the content type based on the provided char sequence. - * - * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. - * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. - * This method is deprecated to prevent usages of it from spreading further without specific reasons. - */ - @Deprecated - public static XContentType xContentType(CharSequence content) { - int length = content.length() < GUESS_HEADER_LENGTH ? content.length() : GUESS_HEADER_LENGTH; - if (length == 0) { - return null; - } - char first = content.charAt(0); - if (first == '{') { - return XContentType.JSON; - } - // Should we throw a failure here? Smile idea is to use it in bytes.... 
- if (length > 2 - && first == SmileConstants.HEADER_BYTE_1 - && content.charAt(1) == SmileConstants.HEADER_BYTE_2 - && content.charAt(2) == SmileConstants.HEADER_BYTE_3) { - return XContentType.SMILE; - } - if (length > 2 && first == '-' && content.charAt(1) == '-' && content.charAt(2) == '-') { - return XContentType.YAML; - } - - // CBOR is not supported - - for (int i = 0; i < length; i++) { - char c = content.charAt(i); - if (c == '{') { - return XContentType.JSON; - } - if (Character.isWhitespace(c) == false) { - break; - } - } - return null; - } - - /** - * Guesses the content (type) based on the provided char sequence and returns the corresponding {@link XContent} - * - * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. - * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. - * This method is deprecated to prevent usages of it from spreading further without specific reasons. - */ - @Deprecated - public static XContent xContent(CharSequence content) { - XContentType type = xContentType(content); - if (type == null) { - throw new XContentParseException("Failed to derive xcontent"); - } - return xContent(type); - } - - /** - * Guesses the content type based on the provided bytes and returns the corresponding {@link XContent} - * - * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. - * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. - * This method is deprecated to prevent usages of it from spreading further without specific reasons. - */ - @Deprecated - public static XContent xContent(byte[] data) { - return xContent(data, 0, data.length); - } - - /** - * Guesses the content type based on the provided bytes and returns the corresponding {@link XContent} - * - * @deprecated guessing the content type should not be needed ideally. We should rather know the content type upfront or read it - * from headers. Till we fixed the REST layer to read the Content-Type header, that should be the only place where guessing is needed. - */ - @Deprecated - public static XContent xContent(byte[] data, int offset, int length) { - XContentType type = xContentType(data, offset, length); - if (type == null) { - throw new XContentParseException("Failed to derive xcontent"); - } - return xContent(type); - } - - /** - * Guesses the content type based on the provided input stream without consuming it. - * - * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. - * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. - * This method is deprecated to prevent usages of it from spreading further without specific reasons. - */ - @Deprecated - public static XContentType xContentType(InputStream si) throws IOException { - /* - * We need to guess the content type. To do this, we look for the first non-whitespace character and then try to guess the content - * type on the GUESS_HEADER_LENGTH bytes that follow. We do this in a way that does not modify the initial read position in the - * underlying input stream. This is why the input stream must support mark/reset and why we repeatedly mark the read position and - * reset. 
- */ - if (si.markSupported() == false) { - throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass()); - } - si.mark(Integer.MAX_VALUE); - try { - // scan until we find the first non-whitespace character or the end of the stream - int current; - do { - current = si.read(); - if (current == -1) { - return null; - } - } while (Character.isWhitespace((char) current)); - // now guess the content type off the next GUESS_HEADER_LENGTH bytes including the current byte - final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH]; - firstBytes[0] = (byte) current; - int read = 1; - while (read < GUESS_HEADER_LENGTH) { - final int r = si.read(firstBytes, read, GUESS_HEADER_LENGTH - read); - if (r == -1) { - break; - } - read += r; - } - return xContentType(firstBytes, 0, read); - } finally { - si.reset(); - } - - } - - /** - * Guesses the content type based on the provided bytes. - * - * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. - * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. - * This method is deprecated to prevent usages of it from spreading further without specific reasons. - */ - @Deprecated - public static XContentType xContentType(byte[] bytes) { - return xContentType(bytes, 0, bytes.length); - } - - /** - * Guesses the content type based on the provided bytes. - * - * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. - * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. - * This method is deprecated to prevent usages of it from spreading further without specific reasons. 
- */ - @Deprecated - public static XContentType xContentType(byte[] bytes, int offset, int length) { - int totalLength = bytes.length; - if (totalLength == 0 || length == 0) { - return null; - } else if ((offset + length) > totalLength) { - return null; - } - byte first = bytes[offset]; - if (first == '{') { - return XContentType.JSON; - } - if (length > 2 - && first == SmileConstants.HEADER_BYTE_1 - && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 - && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) { - return XContentType.SMILE; - } - if (length > 2 && first == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-') { - return XContentType.YAML; - } - // CBOR logic similar to CBORFactory#hasCBORFormat - if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { - return XContentType.CBOR; - } - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first) && length > 2) { - // Actually, specific "self-describe tag" is a very good indicator - if (first == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) { - return XContentType.CBOR; - } - } - // for small objects, some encoders just encode as major type object, we can safely - // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) { - return XContentType.CBOR; - } - - int jsonStart = 0; - // JSON may be preceded by UTF-8 BOM - if (length > 3 && first == (byte) 0xEF && bytes[offset + 1] == (byte) 0xBB && bytes[offset + 2] == (byte) 0xBF) { - jsonStart = 3; - } - - // a last chance for JSON - for (int i = jsonStart; i < length; i++) { - byte b = bytes[offset + i]; - if (b == '{') { - return XContentType.JSON; - } - if (Character.isWhitespace(b) == false) { - break; - } - } - return null; + return MediaTypeRegistry.contentBuilder(XContentType.CBOR); } } diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java index d418e25ba8292..7026dbd5e78c3 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentType.java @@ -32,6 +32,8 @@ package org.opensearch.common.xcontent; +import com.fasterxml.jackson.dataformat.cbor.CBORConstants; +import com.fasterxml.jackson.dataformat.smile.SmileConstants; import org.opensearch.common.xcontent.cbor.CborXContent; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.smile.SmileXContent; @@ -39,8 +41,10 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.XContent; +import org.opensearch.core.xcontent.XContentBuilder; import java.io.IOException; +import java.io.OutputStream; /** * The content type of {@link XContent}. 
@@ -70,6 +74,26 @@ public String subtype() { public XContent xContent() { return JsonXContent.jsonXContent; } + + @Override + public boolean detectedXContent(final byte[] bytes, int offset, int length) { + return bytes[offset] == '{'; + } + + @Override + public boolean detectedXContent(final CharSequence content, final int length) { + return content.charAt(0) == '{'; + } + + @Override + public XContentBuilder contentBuilder() throws IOException { + return JsonXContent.contentBuilder(); + } + + @Override + public XContentBuilder contentBuilder(final OutputStream os) throws IOException { + return new XContentBuilder(JsonXContent.jsonXContent, os); + } }, /** * The jackson based smile binary format. Fast and compact binary format. @@ -89,6 +113,32 @@ public String subtype() { public XContent xContent() { return SmileXContent.smileXContent; } + + @Override + public boolean detectedXContent(final byte[] bytes, int offset, int length) { + return length > 2 + && bytes[offset] == SmileConstants.HEADER_BYTE_1 + && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 + && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3; + } + + @Override + public boolean detectedXContent(final CharSequence content, final int length) { + return length > 2 + && content.charAt(0) == SmileConstants.HEADER_BYTE_1 + && content.charAt(1) == SmileConstants.HEADER_BYTE_2 + && content.charAt(2) == SmileConstants.HEADER_BYTE_3; + } + + @Override + public XContentBuilder contentBuilder() throws IOException { + return SmileXContent.contentBuilder(); + } + + @Override + public XContentBuilder contentBuilder(final OutputStream os) throws IOException { + return new XContentBuilder(SmileXContent.smileXContent, os); + } }, /** * A YAML based content type. @@ -108,6 +158,26 @@ public String subtype() { public XContent xContent() { return YamlXContent.yamlXContent; } + + @Override + public boolean detectedXContent(final byte[] bytes, int offset, int length) { + return length > 2 && bytes[offset] == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-'; + } + + @Override + public boolean detectedXContent(final CharSequence content, final int length) { + return length > 2 && content.charAt(0) == '-' && content.charAt(1) == '-' && content.charAt(2) == '-'; + } + + @Override + public XContentBuilder contentBuilder() throws IOException { + return YamlXContent.contentBuilder(); + } + + @Override + public XContentBuilder contentBuilder(final OutputStream os) throws IOException { + return new XContentBuilder(YamlXContent.yamlXContent, os); + } }, /** * A CBOR based content type. 
@@ -127,6 +197,41 @@ public String subtype() { public XContent xContent() { return CborXContent.cborXContent; } + + @Override + public boolean detectedXContent(final byte[] bytes, int offset, int length) { + // CBOR logic similar to CBORFactory#hasCBORFormat + if (bytes[offset] == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { + return true; + } + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, bytes[offset]) && length > 2) { + // Actually, specific "self-describe tag" is a very good indicator + if (bytes[offset] == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) { + return true; + } + } + // for small objects, some encoders just encode as major type object, we can safely + // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, bytes[offset])) { + return true; + } + return false; + } + + @Override + public boolean detectedXContent(final CharSequence content, final int length) { + return false; + } + + @Override + public XContentBuilder contentBuilder() throws IOException { + return CborXContent.contentBuilder(); + } + + @Override + public XContentBuilder contentBuilder(final OutputStream os) throws IOException { + return new XContentBuilder(CborXContent.cborXContent, os); + } }; private int index; diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java index 9164b5216f9a5..3d4bbba9cc50c 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContentGenerator.java @@ -43,9 +43,9 @@ import com.fasterxml.jackson.core.util.JsonGeneratorDelegate; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContent; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentGenerator; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -339,7 +339,7 @@ public void writeRawField(String name, InputStream content) throws IOException { // needed for the XContentFactory.xContentType call content = new BufferedInputStream(content); } - XContentType contentType = XContentFactory.xContentType(content); + MediaType contentType = MediaTypeRegistry.xContentType(content); if (contentType == null) { throw new IllegalArgumentException("Can't write raw bytes whose xcontent-type can't be guessed"); } @@ -354,7 +354,7 @@ public void writeRawField(String name, InputStream content, MediaType mediaType) if (mayWriteRawData(mediaType) == false) { // EMPTY is safe here because we never call namedObject when writing raw data try ( - XContentParser parser = XContentFactory.xContent(mediaType) + XContentParser parser = mediaType.xContent() // It's okay to pass the throwing deprecation handler // because we should not be writing raw fields when // generating JSON diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java index b189c8ed8905e..87598aa28ce8f 100644 --- 
a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java +++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/UserAgentParser.java @@ -35,7 +35,6 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import java.io.IOException; @@ -68,7 +67,7 @@ final class UserAgentParser { private void init(InputStream regexStream) throws IOException { // EMPTY is safe here because we don't use namedObject - XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) + XContentParser yamlParser = XContentType.YAML.xContent() .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, regexStream); XContentParser.Token token = yamlParser.nextToken(); diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java index 6e56ecf3950bb..da67a0d2dd13a 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateResponse.java @@ -40,8 +40,8 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.StatusToXContentObject; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.rest.RestStatus; @@ -104,11 +104,11 @@ public static SearchTemplateResponse fromXContent(XContentParser parser) throws if (contentAsMap.containsKey(TEMPLATE_OUTPUT_FIELD.getPreferredName())) { Object source = contentAsMap.get(TEMPLATE_OUTPUT_FIELD.getPreferredName()); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).value(source); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON).value(source); searchTemplateResponse.setSource(BytesReference.bytes(builder)); } else { MediaType contentType = parser.contentType(); - XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(contentType).map(contentAsMap); XContentParser searchResponseParser = contentType.xContent() .createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), BytesReference.bytes(builder).streamInput()); diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java index bbda8d15d9d41..f6b0cc4eecf9a 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java @@ -42,7 +42,6 @@ import org.opensearch.common.inject.Inject; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; 
-import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.rest.action.search.RestSearchAction; @@ -132,8 +131,7 @@ static SearchRequest convert( } try ( - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, source) + XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, source) ) { SearchSourceBuilder builder = SearchSourceBuilder.searchSource(); builder.parseXContent(parser, false); diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java index 664e87da0a7d8..e3669ef67ff36 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestXContentTests.java @@ -33,9 +33,9 @@ package org.opensearch.script.mustache; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -101,7 +101,7 @@ public void testToXContentWithInlineTemplate() throws IOException { request.setScriptParams(scriptParams); XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) + XContentBuilder expectedRequest = MediaTypeRegistry.contentBuilder(contentType) .startObject() .field("source", "{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }") .startObject("params") @@ -112,7 +112,7 @@ public void testToXContentWithInlineTemplate() throws IOException { .field("profile", true) .endObject(); - XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); + XContentBuilder actualRequest = MediaTypeRegistry.contentBuilder(contentType); request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType); @@ -131,7 +131,7 @@ public void testToXContentWithStoredTemplate() throws IOException { request.setScriptParams(params); XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType) + XContentBuilder expectedRequest = MediaTypeRegistry.contentBuilder(contentType) .startObject() .field("id", "match_template") .startObject("params") @@ -142,7 +142,7 @@ public void testToXContentWithStoredTemplate() throws IOException { .field("profile", false) .endObject(); - XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); + XContentBuilder actualRequest = MediaTypeRegistry.contentBuilder(contentType); request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS); assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType); diff --git 
a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java index 804a72561c10e..fd0a4e9612a8f 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateResponseTests.java @@ -36,6 +36,7 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -164,7 +165,7 @@ public void testSourceToXContent() throws IOException { response.setSource(BytesReference.bytes(source)); XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + XContentBuilder expectedResponse = MediaTypeRegistry.contentBuilder(contentType) .startObject() .startObject("template_output") .startObject("query") @@ -175,7 +176,7 @@ public void testSourceToXContent() throws IOException { .endObject() .endObject(); - XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + XContentBuilder actualResponse = MediaTypeRegistry.contentBuilder(contentType); response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); @@ -210,7 +211,7 @@ public void testSearchResponseToXContent() throws IOException { response.setResponse(searchResponse); XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + XContentBuilder expectedResponse = MediaTypeRegistry.contentBuilder(contentType) .startObject() .field("took", 0) .field("timed_out", false) @@ -235,7 +236,7 @@ public void testSearchResponseToXContent() throws IOException { .endObject() .endObject(); - XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + XContentBuilder actualResponse = MediaTypeRegistry.contentBuilder(contentType); response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index d5b61d5c5a517..3500086564719 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -71,6 +71,8 @@ import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.ParseField; import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; @@ -123,7 +125,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder documents; - private final 
XContentType documentXContentType; + private final MediaType documentXContentType; private final String indexedDocumentIndex; private final String indexedDocumentId; @@ -150,7 +152,7 @@ public PercolateQueryBuilder(String field, BytesReference document, XContentType * @param documents The binary blob containing document to percolate * @param documentXContentType The content type of the binary blob containing the document to percolate */ - public PercolateQueryBuilder(String field, List documents, XContentType documentXContentType) { + public PercolateQueryBuilder(String field, List documents, MediaType documentXContentType) { if (field == null) { throw new IllegalArgumentException("[field] is a required argument"); } @@ -252,7 +254,11 @@ protected PercolateQueryBuilder(String field, Supplier documentS } documents = in.readList(StreamInput::readBytesReference); if (documents.isEmpty() == false) { - documentXContentType = in.readEnum(XContentType.class); + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + documentXContentType = in.readMediaType(); + } else { + documentXContentType = in.readEnum(XContentType.class); + } } else { documentXContentType = null; } @@ -298,7 +304,11 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeBytesReference(document); } if (documents.isEmpty() == false) { - out.writeEnum(documentXContentType); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + documentXContentType.writeTo(out); + } else { + out.writeEnum((XContentType) documentXContentType); + } } } @@ -432,7 +442,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) { PercolateQueryBuilder rewritten = new PercolateQueryBuilder( field, Collections.singletonList(source), - XContentHelper.xContentType(source) + MediaTypeRegistry.xContentType(source) ); if (name != null) { rewritten.setName(name); @@ -560,7 +570,7 @@ public List getDocuments() { } // pkg-private for testing - XContentType getXContentType() { + MediaType getXContentType() { return documentXContentType; } diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java index ea001de0ee7c6..d97d5a3a7dcd5 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -36,9 +36,9 @@ import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -274,7 +274,7 @@ public static DiscountedCumulativeGain createTestItem() { public void testXContentRoundtrip() throws IOException { DiscountedCumulativeGain testItem = createTestItem(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); try (XContentParser 
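Editorial aside (not part of the patch): the PercolateQueryBuilder hunks above widen the document content-type field from XContentType to MediaType and gate its wire format on Version.V_3_0_0. A minimal sketch of that read/write pairing, using only the stream calls that appear in those hunks; the org.opensearch.Version import is assumed, since the hunks do not show it:

    // Sketch of the version-gated MediaType wire format used by the PercolateQueryBuilder hunks.
    import java.io.IOException;
    import org.opensearch.Version;
    import org.opensearch.common.xcontent.XContentType;
    import org.opensearch.core.common.io.stream.StreamInput;
    import org.opensearch.core.common.io.stream.StreamOutput;
    import org.opensearch.core.xcontent.MediaType;

    final class MediaTypeWireSketch {
        static void write(StreamOutput out, MediaType mediaType) throws IOException {
            if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
                mediaType.writeTo(out);                   // native MediaType serialization on new versions
            } else {
                out.writeEnum((XContentType) mediaType);  // legacy enum for older nodes
            }
        }

        static MediaType read(StreamInput in) throws IOException {
            return in.getVersion().onOrAfter(Version.V_3_0_0)
                ? in.readMediaType()
                : in.readEnum(XContentType.class);
        }
    }
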
itemParser = createParser(shuffled)) { itemParser.nextToken(); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java index 014f52faa9d57..32c7edc845e32 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -35,9 +35,9 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -166,7 +166,7 @@ public static ExpectedReciprocalRank createTestItem() { public void testXContentRoundtrip() throws IOException { ExpectedReciprocalRank testItem = createTestItem(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); try (XContentParser itemParser = createParser(shuffled)) { itemParser.nextToken(); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java index 3df79acfa6ce1..d34e6ba8bd72e 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/MeanReciprocalRankTests.java @@ -35,9 +35,9 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -183,7 +183,7 @@ public void testNoResults() throws Exception { public void testXContentRoundtrip() throws IOException { MeanReciprocalRank testItem = createTestItem(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); try (XContentParser itemParser = createParser(shuffled)) { itemParser.nextToken(); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java index 3317a2d2f00f1..41340cfd1003d 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java +++ 
b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/PrecisionAtKTests.java @@ -35,9 +35,9 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -199,7 +199,7 @@ public static PrecisionAtK createTestItem() { public void testXContentRoundtrip() throws IOException { PrecisionAtK testItem = createTestItem(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); try (XContentParser itemParser = createParser(shuffled)) { itemParser.nextToken(); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java index 7c0590566bba9..d027026a6b317 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java @@ -44,9 +44,9 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentLocation; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -177,7 +177,7 @@ public void testToXContent() throws IOException { Collections.singletonMap("coffee_query", coffeeQueryQuality), Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg")) ); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString(); assertEquals( ("{" diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java index 988784b6e57a3..01f5a3a12da01 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedDocumentTests.java @@ -34,9 +34,9 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import 
org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -57,7 +57,7 @@ public static RatedDocument createRatedDocument() { public void testXContentParsing() throws IOException { RatedDocument testItem = createRatedDocument(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); try (XContentParser itemParser = createParser(shuffled)) { RatedDocument parsedItem = RatedDocument.fromXContent(itemParser); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java index ac2401f30e6f0..6e99e31a2b819 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RatedRequestsTests.java @@ -35,10 +35,10 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -134,7 +134,7 @@ public static RatedRequest createTestItem(boolean forceRequest) { public void testXContentRoundtrip() throws IOException { RatedRequest testItem = createTestItem(randomBoolean()); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); try (XContentParser itemParser = createParser(shuffled)) { itemParser.nextToken(); diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java index 85e024f6bb1e9..37f778fbc5059 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RecallAtKTests.java @@ -35,9 +35,9 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -185,7 +185,7 @@ public static RecallAtK createTestItem() { public void testXContentRoundtrip() throws IOException { RecallAtK testItem = createTestItem(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = 
MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS)); try (XContentParser itemParser = createParser(shuffled)) { itemParser.nextToken(); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java index 6f36fafd852ef..4b1e416ac63ea 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -37,8 +37,8 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.search.RestSearchAction; @@ -106,7 +106,7 @@ private XContentParser extractRequestSpecificFields(RestRequest restRequest, Map } try ( XContentParser parser = restRequest.contentOrSourceParamParser(); - XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()) + XContentBuilder builder = MediaTypeRegistry.contentBuilder(parser.contentType()) ) { Map body = parser.map(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java index e554a3be20528..fe62f5902cbba 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/FiltersAggsRewriteIT.java @@ -35,8 +35,8 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.WrapperQueryBuilder; import org.opensearch.search.aggregations.bucket.filter.FiltersAggregationBuilder; @@ -59,7 +59,7 @@ public void testWrapperQueryIsRewritten() throws IOException { XContentType xContentType = randomFrom(XContentType.values()); BytesReference bytesReference; - try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType)) { builder.startObject(); { builder.startObject("terms"); diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 5da3f2eb01260..3bb552b80084d 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -44,6 +44,7 @@ import org.opensearch.core.common.io.stream.StreamOutput; import 
org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -387,7 +388,7 @@ public CreateSnapshotRequest settings(String source, MediaType mediaType) { */ public CreateSnapshotRequest settings(Map source) { try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.map(source); settings(Strings.toString(builder), builder.contentType()); } catch (IOException e) { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java index 9118cdd56babd..2f304668df01c 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java @@ -40,6 +40,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; @@ -127,7 +128,7 @@ public Alias filter(Map filter) { return this; } try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.map(filter); this.filter = Strings.toString(builder); return this; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java index 142cbe6a0ab0b..6ed9648502ee3 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -48,6 +48,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ObjectParser; import org.opensearch.core.xcontent.ObjectParser.ValueType; import org.opensearch.core.xcontent.ToXContent; @@ -428,7 +429,7 @@ public AliasActions filter(Map filter) { return this; } try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.map(filter); this.filter = org.opensearch.common.Strings.toString(builder); return this; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java index 465a44556c081..2588dfd718e71 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -49,6 +49,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; import 
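Editorial aside (not part of the patch): the CreateSnapshotRequest, Alias and IndicesAliasesRequest hunks above (and the PutMappingRequest hunk just below) all make the same substitution when turning a Map into JSON. A minimal sketch of that helper shape; the unchecked rethrow is a simplification here, the real call sites wrap the IOException in their own exception types:

    // Sketch of the Map-to-JSON step shared by the surrounding hunks.
    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.util.Map;
    import org.opensearch.common.xcontent.XContentType;
    import org.opensearch.core.common.bytes.BytesReference;
    import org.opensearch.core.xcontent.MediaTypeRegistry;
    import org.opensearch.core.xcontent.XContentBuilder;

    final class MapToJsonSketch {
        // Replaces XContentFactory.contentBuilder(XContentType.JSON) with the registry call.
        static BytesReference toJsonBytes(Map<String, Object> source) {
            try {
                XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
                builder.map(source);
                return BytesReference.bytes(builder);
            } catch (IOException e) {
                throw new UncheckedIOException(e); // simplification; real callers rethrow domain exceptions
            }
        }
    }
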
org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.index.Index; @@ -298,7 +299,7 @@ public PutMappingRequest source(XContentBuilder mappingBuilder) { */ public PutMappingRequest source(Map mappingSource) { try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.map(mappingSource); return source(BytesReference.bytes(builder), builder.contentType()); } catch (IOException e) { diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequest.java b/server/src/main/java/org/opensearch/action/index/IndexRequest.java index d686f0b460634..96d27917d5164 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/opensearch/action/index/IndexRequest.java @@ -54,11 +54,11 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.unit.ByteSizeValue; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.VersionType; import org.opensearch.index.mapper.MapperService; @@ -378,7 +378,7 @@ public IndexRequest source(Map source) throws OpenSearchGenerationExc */ public IndexRequest source(Map source, MediaType contentType) throws OpenSearchGenerationException { try { - XContentBuilder builder = XContentFactory.contentBuilder(contentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(contentType); builder.map(source); return source(builder); } catch (IOException e) { @@ -434,7 +434,7 @@ public IndexRequest source(MediaType mediaType, Object... 
source) { ); } try { - XContentBuilder builder = XContentFactory.contentBuilder(mediaType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(mediaType); builder.startObject(); for (int i = 0; i < source.length; i++) { builder.field(source[i++].toString(), source[i]); diff --git a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java index 71200b05d70ad..57cc4698cefce 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java @@ -49,8 +49,8 @@ import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.util.set.Sets; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.VersionType; import org.opensearch.index.mapper.MapperService; @@ -306,7 +306,7 @@ public TermVectorsRequest doc(XContentBuilder documentBuilder) { */ @Deprecated public TermVectorsRequest doc(BytesReference doc, boolean generateRandomId) { - return this.doc(doc, generateRandomId, XContentHelper.xContentType(doc)); + return this.doc(doc, generateRandomId, MediaTypeRegistry.xContentType(doc)); } /** diff --git a/server/src/main/java/org/opensearch/action/update/UpdateRequest.java b/server/src/main/java/org/opensearch/action/update/UpdateRequest.java index 44454630ff24d..86ebc0d9b69d6 100644 --- a/server/src/main/java/org/opensearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/opensearch/action/update/UpdateRequest.java @@ -49,11 +49,11 @@ import org.opensearch.core.ParseField; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ObjectParser; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -107,12 +107,12 @@ public class UpdateRequest extends InstanceShardOperationRequest ); PARSER.declareBoolean(UpdateRequest::scriptedUpsert, SCRIPTED_UPSERT_FIELD); PARSER.declareObject((request, builder) -> request.safeUpsertRequest().source(builder), (parser, context) -> { - XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(parser.contentType()); builder.copyCurrentStructure(parser); return builder; }, UPSERT_FIELD); PARSER.declareObject((request, builder) -> request.safeDoc().source(builder), (parser, context) -> { - XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType()); + XContentBuilder docBuilder = MediaTypeRegistry.contentBuilder(parser.contentType()); docBuilder.copyCurrentStructure(parser); return docBuilder; }, DOC_FIELD); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java index 44f6f2d7313a3..27ecca0358bd8 100644 --- 
a/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java @@ -45,6 +45,7 @@ import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; @@ -307,7 +308,7 @@ public Builder filter(String filter) { this.filter = null; return this; } - return filter(XContentHelper.convertToMap(XContentFactory.xContent(filter), filter, true)); + return filter(XContentHelper.convertToMap(MediaTypeRegistry.xContent(filter).xContent(), filter, true)); } public Builder filter(Map filter) { diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java index 5a019804f5eac..9b9b91a07a5cf 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasValidator.java @@ -36,10 +36,10 @@ import org.opensearch.common.Nullable; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryShardContext; @@ -88,7 +88,7 @@ public void validateAliasStandalone(Alias alias) { validateAliasStandalone(alias.name(), alias.indexRouting()); if (Strings.hasLength(alias.filter())) { try { - XContentHelper.convertToMap(XContentFactory.xContent(alias.filter()), alias.filter(), false); + XContentHelper.convertToMap(MediaTypeRegistry.xContent(alias.filter()).xContent(), alias.filter(), false); } catch (Exception e) { throw new IllegalArgumentException("failed to parse filter for alias [" + alias.name() + "]", e); } @@ -134,7 +134,8 @@ public void validateAliasFilter( ) { assert queryShardContext != null; try ( - XContentParser parser = XContentFactory.xContent(filter) + XContentParser parser = MediaTypeRegistry.xContent(filter) + .xContent() .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, filter) ) { validateAliasFilter(parser, queryShardContext); @@ -158,7 +159,7 @@ public void validateAliasFilter( try ( InputStream inputStream = filter.streamInput(); - XContentParser parser = XContentFactory.xContentType(inputStream) + XContentParser parser = MediaTypeRegistry.xContentType(inputStream) .xContent() .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, filter.streamInput()) ) { diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java index 1ba38daa40566..bcf9cbd0efef8 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java @@ -53,9 +53,9 @@ import org.opensearch.common.settings.Setting; import 
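Editorial aside (not part of the patch): the AliasMetadata and AliasValidator hunks above first resolve the media type of a filter string through MediaTypeRegistry.xContent(...) and then parse with it. A minimal sketch of that detect-then-parse step, reusing only the calls shown in those hunks:

    // Sketch of the detect-then-parse step from the AliasMetadata/AliasValidator hunks.
    import java.io.IOException;
    import java.util.Map;
    import org.opensearch.common.xcontent.LoggingDeprecationHandler;
    import org.opensearch.common.xcontent.XContentHelper;
    import org.opensearch.core.xcontent.MediaTypeRegistry;
    import org.opensearch.core.xcontent.NamedXContentRegistry;
    import org.opensearch.core.xcontent.XContentParser;

    final class FilterParsingSketch {
        static Map<String, Object> filterAsMap(String filter) {
            // convertToMap(XContent, String, boolean) as in the AliasMetadata hunk.
            return XContentHelper.convertToMap(MediaTypeRegistry.xContent(filter).xContent(), filter, true);
        }

        static XContentParser filterParser(String filter, NamedXContentRegistry registry) throws IOException {
            // createParser(registry, handler, String) as in the AliasValidator hunk.
            return MediaTypeRegistry.xContent(filter)
                .xContent()
                .createParser(registry, LoggingDeprecationHandler.INSTANCE, filter);
        }
    }
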
org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; @@ -1296,7 +1296,7 @@ public Builder putMapping(String source) throws IOException { putMapping( new MappingMetadata( MapperService.SINGLE_MAPPING_NAME, - XContentHelper.convertToMap(XContentFactory.xContent(source), source, true) + XContentHelper.convertToMap(MediaTypeRegistry.xContent(source).xContent(), source, true) ) ); return this; diff --git a/server/src/main/java/org/opensearch/common/compress/CompressorFactory.java b/server/src/main/java/org/opensearch/common/compress/CompressorFactory.java index 62ec933fe5f37..ee2d5a650fc70 100644 --- a/server/src/main/java/org/opensearch/common/compress/CompressorFactory.java +++ b/server/src/main/java/org/opensearch/common/compress/CompressorFactory.java @@ -34,8 +34,8 @@ import org.opensearch.common.Nullable; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.common.compress.NotXContentException; +import org.opensearch.core.xcontent.MediaTypeRegistry; import java.io.IOException; import java.util.Objects; @@ -67,14 +67,14 @@ public static Compressor compressor(BytesReference bytes) { // bytes should be either detected as compressed or as xcontent, // if we have bytes that can be either detected as compressed or // as a xcontent, we have a problem - assert XContentHelper.xContentType(bytes) == null; + assert MediaTypeRegistry.xContentType(bytes) == null; return DEFLATE_COMPRESSOR; } else if (ZSTD_COMPRESSOR.isCompressed(bytes)) { - assert XContentHelper.xContentType(bytes) == null; + assert MediaTypeRegistry.xContentType(bytes) == null; return ZSTD_COMPRESSOR; } - if (XContentHelper.xContentType(bytes) == null) { + if (MediaTypeRegistry.xContentType(bytes) == null) { throw new NotXContentException("Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes"); } diff --git a/server/src/main/java/org/opensearch/common/settings/Settings.java b/server/src/main/java/org/opensearch/common/settings/Settings.java index a7649a1cd22c5..10a619d833add 100644 --- a/server/src/main/java/org/opensearch/common/settings/Settings.java +++ b/server/src/main/java/org/opensearch/common/settings/Settings.java @@ -48,7 +48,7 @@ import org.opensearch.common.unit.MemorySizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentParserUtils; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.settings.SecureString; @@ -1081,7 +1081,7 @@ private void processLegacyLists(Map map) { */ public Builder loadFromMap(Map map) { // TODO: do this without a serialization round-trip - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.map(map); return loadFromSource(Strings.toString(builder), builder.contentType()); } catch (IOException e) 
{ @@ -1094,7 +1094,7 @@ public Builder loadFromMap(Map map) { */ public Builder loadFromSource(String source, MediaType xContentType) { try ( - XContentParser parser = XContentFactory.xContent(xContentType) + XContentParser parser = xContentType.xContent() .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, source) ) { this.put(fromXContent(parser, true, true)); @@ -1127,7 +1127,7 @@ public Builder loadFromStream(String resourceName, InputStream is, boolean accep } // fromXContent doesn't use named xcontent or deprecation. try ( - XContentParser parser = XContentFactory.xContent(xContentType) + XContentParser parser = xContentType.xContent() .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, is) ) { if (parser.currentToken() == null) { diff --git a/server/src/main/java/org/opensearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/opensearch/common/xcontent/XContentHelper.java index ae1fb0724fd9e..a0903cbdd9f61 100644 --- a/server/src/main/java/org/opensearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/opensearch/common/xcontent/XContentHelper.java @@ -41,6 +41,7 @@ import org.opensearch.common.compress.CompressorFactory; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContent.Params; @@ -52,7 +53,6 @@ import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; -import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; @@ -86,14 +86,14 @@ public static XContentParser createParser( if (compressedInput.markSupported() == false) { compressedInput = new BufferedInputStream(compressedInput); } - final XContentType contentType = XContentFactory.xContentType(compressedInput); - return XContentFactory.xContent(contentType).createParser(xContentRegistry, deprecationHandler, compressedInput); + final MediaType contentType = MediaTypeRegistry.xContentType(compressedInput); + return contentType.xContent().createParser(xContentRegistry, deprecationHandler, compressedInput); } catch (Exception e) { if (compressedInput != null) compressedInput.close(); throw e; } } else { - return XContentFactory.xContent(xContentType(bytes)).createParser(xContentRegistry, deprecationHandler, bytes.streamInput()); + return MediaTypeRegistry.xContentType(bytes).xContent().createParser(xContentRegistry, deprecationHandler, bytes.streamInput()); } } @@ -115,7 +115,7 @@ public static XContentParser createParser( if (compressedInput.markSupported() == false) { compressedInput = new BufferedInputStream(compressedInput); } - return XContentFactory.xContent(mediaType).createParser(xContentRegistry, deprecationHandler, compressedInput); + return mediaType.xContent().createParser(xContentRegistry, deprecationHandler, compressedInput); } catch (Exception e) { if (compressedInput != null) compressedInput.close(); throw e; @@ -179,20 +179,14 @@ public static Tuple> convertToMap( final byte[] raw = arr.array(); final int offset = arr.offset(); final int length = arr.length(); - contentType = xContentType != null ? 
xContentType : XContentFactory.xContentType(raw, offset, length); - return new Tuple<>( - Objects.requireNonNull(contentType), - convertToMap(XContentFactory.xContent(contentType), raw, offset, length, ordered) - ); + contentType = xContentType != null ? xContentType : MediaTypeRegistry.mediaTypeFromBytes(raw, offset, length); + return new Tuple<>(Objects.requireNonNull(contentType), convertToMap(contentType.xContent(), raw, offset, length, ordered)); } else { input = bytes.streamInput(); } try (InputStream stream = input) { - contentType = xContentType != null ? xContentType : XContentFactory.xContentType(stream); - return new Tuple<>( - Objects.requireNonNull(contentType), - convertToMap(XContentFactory.xContent(contentType), stream, ordered) - ); + contentType = xContentType != null ? xContentType : MediaTypeRegistry.xContentType(stream); + return new Tuple<>(Objects.requireNonNull(contentType), convertToMap(contentType.xContent(), stream, ordered)); } } catch (IOException e) { throw new OpenSearchParseException("Failed to parse content to map", e); @@ -266,7 +260,7 @@ public static String convertToJson(BytesReference bytes, boolean reformatJson) t @Deprecated public static String convertToJson(BytesReference bytes, boolean reformatJson, boolean prettyPrint) throws IOException { - return convertToJson(bytes, reformatJson, prettyPrint, XContentFactory.xContentType(bytes.toBytesRef().bytes)); + return convertToJson(bytes, reformatJson, prettyPrint, MediaTypeRegistry.xContent(bytes.toBytesRef().bytes)); } public static String convertToJson(BytesReference bytes, boolean reformatJson, MediaType xContentType) throws IOException { @@ -311,7 +305,7 @@ public static String convertToJson(BytesReference bytes, boolean reformatJson, b if (bytes instanceof BytesArray) { final BytesArray array = (BytesArray) bytes; try ( - XContentParser parser = XContentFactory.xContent(mediaType) + XContentParser parser = mediaType.xContent() .createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, @@ -325,7 +319,7 @@ public static String convertToJson(BytesReference bytes, boolean reformatJson, b } else { try ( InputStream stream = bytes.streamInput(); - XContentParser parser = XContentFactory.xContent(mediaType) + XContentParser parser = mediaType.xContent() .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream) ) { return toJsonString(prettyPrint, parser); @@ -545,29 +539,6 @@ public static BytesReference toXContent(ToXContent toXContent, MediaType mediaTy } } - /** - * Guesses the content type based on the provided bytes. - * - * @deprecated the content type should not be guessed except for few cases where we effectively don't know the content type. - * The REST layer should move to reading the Content-Type header instead. There are other places where auto-detection may be needed. - * This method is deprecated to prevent usages of it from spreading further without specific reasons. 
- */ - @Deprecated - public static XContentType xContentType(BytesReference bytes) { - if (bytes instanceof BytesArray) { - final BytesArray array = (BytesArray) bytes; - return XContentFactory.xContentType(array.array(), array.offset(), array.length()); - } - try { - final InputStream inputStream = bytes.streamInput(); - assert inputStream.markSupported(); - return XContentFactory.xContentType(inputStream); - } catch (IOException e) { - assert false : "Should not happen, we're just reading bytes from memory"; - throw new UncheckedIOException(e); - } - } - /** * Returns the contents of an object as an unparsed BytesReference * diff --git a/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java b/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java index 9943f04c964e1..2a419031b2c5a 100644 --- a/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java +++ b/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java @@ -49,12 +49,12 @@ import org.opensearch.common.lucene.store.IndexOutputOutputStream; import org.opensearch.common.lucene.store.InputStreamIndexInput; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.common.util.io.IOUtils; import java.io.FileNotFoundException; import java.io.IOException; @@ -288,7 +288,7 @@ private long write(final T state, boolean cleanup, final Path... 
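Editorial aside (not part of the patch): the javadoc removed just above explains why XContentHelper.xContentType(BytesReference) is going away, content-type guessing should be the exception rather than the rule. Where detection is still needed, the hunks in this patch route it through MediaTypeRegistry instead; a minimal sketch of those replacement calls (whether the stream overload declares IOException is an assumption here):

    // Sketch of the MediaTypeRegistry detection calls substituted throughout this patch.
    import java.io.IOException;
    import java.io.InputStream;
    import org.opensearch.core.common.bytes.BytesReference;
    import org.opensearch.core.xcontent.MediaType;
    import org.opensearch.core.xcontent.MediaTypeRegistry;

    final class ContentTypeDetectionSketch {
        static MediaType fromBytes(BytesReference bytes) {
            return MediaTypeRegistry.xContentType(bytes);            // as in the TermVectorsRequest hunk above
        }

        static MediaType fromRawBytes(byte[] raw, int offset, int length) {
            return MediaTypeRegistry.mediaTypeFromBytes(raw, offset, length); // as in the convertToMap hunk above
        }

        static MediaType fromStream(InputStream in) throws IOException {
            return MediaTypeRegistry.xContentType(in);               // as in the XContentHelper.createParser hunk above
        }
    }
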
locations) thro } protected XContentBuilder newXContentBuilder(XContentType type, OutputStream stream) throws IOException { - return XContentFactory.contentBuilder(type, stream); + return MediaTypeRegistry.contentBuilder(type, stream); } /** @@ -321,7 +321,7 @@ public final T read(NamedXContentRegistry namedXContentRegistry, Path file) thro long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) { try ( - XContentParser parser = XContentFactory.xContent(FORMAT) + XContentParser parser = FORMAT.xContent() .createParser( namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, diff --git a/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java index caddf92150cff..c01f51eb7188a 100644 --- a/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/opensearch/gateway/PersistedClusterStateService.java @@ -67,7 +67,6 @@ import org.opensearch.common.CheckedConsumer; import org.opensearch.common.Nullable; import org.opensearch.common.SetOnce; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.bytes.RecyclingBytesStreamOutput; import org.opensearch.common.io.Streams; import org.opensearch.common.logging.Loggers; @@ -76,14 +75,15 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; -import org.opensearch.core.common.util.ByteArray; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.util.ByteArray; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -448,7 +448,7 @@ private OnDiskState loadOnDiskState(Path dataPath, DirectoryReader reader) throw final SetOnce builderReference = new SetOnce<>(); consumeFromType(searcher, GLOBAL_TYPE_NAME, bytes -> { final Metadata metadata = Metadata.Builder.fromXContent( - XContentFactory.xContent(XContentType.SMILE) + XContentType.SMILE.xContent() .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, bytes.bytes, bytes.offset, bytes.length) ); logger.trace("found global metadata with last-accepted term [{}]", metadata.coordinationMetadata().term()); @@ -468,7 +468,7 @@ private OnDiskState loadOnDiskState(Path dataPath, DirectoryReader reader) throw final Set indexUUIDs = new HashSet<>(); consumeFromType(searcher, INDEX_TYPE_NAME, bytes -> { final IndexMetadata indexMetadata = IndexMetadata.fromXContent( - XContentFactory.xContent(XContentType.SMILE) + XContentType.SMILE.xContent() .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, bytes.bytes, bytes.offset, bytes.length) ); logger.trace("found index metadata for {}", indexMetadata.getIndex()); @@ -921,7 +921,7 @@ private Document makeDocument(String typeName, ToXContent metadata, DocumentBuff try (RecyclingBytesStreamOutput 
streamOutput = documentBuffer.streamOutput()) { try ( - XContentBuilder xContentBuilder = XContentFactory.contentBuilder( + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder( XContentType.SMILE, Streams.flushOnCloseStream(streamOutput) ) diff --git a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java index b3715e097322d..8f81e704ef2d4 100644 --- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java @@ -51,10 +51,10 @@ import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.metrics.MeanMetric; import org.opensearch.common.util.set.Sets; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.IndexSettings; import org.opensearch.index.VersionType; import org.opensearch.index.engine.Engine; @@ -299,7 +299,7 @@ private GetResult innerGetLoadFromStoredFields( shardId.getIndexName(), id, source, - XContentHelper.xContentType(source), + MediaTypeRegistry.xContentType(source), fieldVisitor.routing() ); ParsedDocument doc = indexShard.mapperService().documentMapper().parse(sourceToParse); @@ -378,7 +378,7 @@ private GetResult innerGetLoadFromStoredFields( sourceAsMap = typeMapTuple.v2(); sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes()); try { - source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap)); + source = BytesReference.bytes(MediaTypeRegistry.contentBuilder(sourceContentType).map(sourceAsMap)); } catch (IOException e) { throw new OpenSearchException("Failed to get id [" + id + "] with includes/excludes set", e); } @@ -406,7 +406,7 @@ private GetResult innerGetLoadFromStoredFields( sourceAsMap = typeMapTuple.v2(); sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes()); try { - source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap)); + source = BytesReference.bytes(MediaTypeRegistry.contentBuilder(sourceContentType).map(sourceAsMap)); } catch (IOException e) { throw new OpenSearchException("Failed to get id [" + id + "] with includes/excludes set", e); } diff --git a/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java index ad467d045b7e4..deda9883bc07b 100644 --- a/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java @@ -39,15 +39,15 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.opensearch.common.Nullable; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.util.CollectionUtils; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.Strings; import 
org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.query.QueryShardException; @@ -228,7 +228,7 @@ public BytesReference applyFilters(@Nullable BytesReference originalSource, @Nul Map filteredSource = filter.apply(mapTuple.v2()); BytesStreamOutput bStream = new BytesStreamOutput(); MediaType actualContentType = mapTuple.v1(); - XContentBuilder builder = XContentFactory.contentBuilder(actualContentType, bStream).map(filteredSource); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(actualContentType, bStream).map(filteredSource); builder.close(); return bStream.bytes(); } else { diff --git a/server/src/main/java/org/opensearch/index/query/WrapperQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/WrapperQueryBuilder.java index aab9adecb9a62..b0fc62ad8ec71 100644 --- a/server/src/main/java/org/opensearch/index/query/WrapperQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/WrapperQueryBuilder.java @@ -34,15 +34,15 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.ParseField; import org.opensearch.core.common.ParsingException; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import java.io.IOException; @@ -175,7 +175,8 @@ protected boolean doEquals(WrapperQueryBuilder other) { @Override protected QueryBuilder doRewrite(QueryRewriteContext context) throws IOException { try ( - XContentParser qSourceParser = XContentFactory.xContent(source) + XContentParser qSourceParser = MediaTypeRegistry.xContent(source) + .xContent() .createParser(context.getXContentRegistry(), LoggingDeprecationHandler.INSTANCE, source) ) { diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java index 40b15eace2bad..a4c4aa9493288 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java @@ -49,10 +49,10 @@ import org.opensearch.common.unit.DistanceUnit; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.fielddata.FieldData; import org.opensearch.index.fielddata.IndexGeoPointFieldData; @@ -230,7 +230,8 @@ protected ScoreFunction doToFunction(QueryShardContext context) throws IOExcepti // EMPTY is safe because parseVariable doesn't use namedObject try ( InputStream 
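Editorial aside (not part of the patch): the SourceFieldMapper hunk above re-serializes the filtered source map in its original media type through MediaTypeRegistry.contentBuilder(mediaType, stream). A minimal sketch that mirrors that hunk:

    // Sketch of the filtered-source round-trip from the SourceFieldMapper hunk.
    import java.io.IOException;
    import java.util.Map;
    import org.opensearch.common.io.stream.BytesStreamOutput;
    import org.opensearch.core.common.bytes.BytesReference;
    import org.opensearch.core.xcontent.MediaType;
    import org.opensearch.core.xcontent.MediaTypeRegistry;
    import org.opensearch.core.xcontent.XContentBuilder;

    final class FilteredSourceSketch {
        static BytesReference rewrite(Map<String, Object> filteredSource, MediaType actualContentType) throws IOException {
            BytesStreamOutput bStream = new BytesStreamOutput();
            // Build the output in the same media type the source arrived in.
            XContentBuilder builder = MediaTypeRegistry.contentBuilder(actualContentType, bStream).map(filteredSource);
            builder.close();
            return bStream.bytes();
        }
    }
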
stream = functionBytes.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(functionBytes)) + XContentParser parser = MediaTypeRegistry.xContentType(functionBytes) + .xContent() .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream) ) { scoreFunction = parseVariable(fieldName, parser, context, multiValueMode); diff --git a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java index b3ed0079ff4c2..2f03eb66bd71e 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java @@ -49,8 +49,8 @@ import org.opensearch.common.document.DocumentField; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.search.SearchHit; import org.opensearch.threadpool.ThreadPool; @@ -210,7 +210,7 @@ public BytesReference getSource() { @Override public MediaType getMediaType() { - return XContentHelper.xContentType(source); + return MediaTypeRegistry.xContentType(source); } @Override diff --git a/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java b/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java index f6b4793f3b87a..8a9a346a5344a 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java +++ b/server/src/main/java/org/opensearch/index/reindex/ReindexRequest.java @@ -44,10 +44,10 @@ import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ObjectParser; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.VersionType; import org.opensearch.index.query.QueryBuilder; @@ -356,7 +356,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws request.getSearchRequest().indices(indices); } request.setRemoteInfo(buildRemoteInfo(source)); - XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(parser.contentType()); builder.map(source); try ( InputStream stream = BytesReference.bytes(builder).streamInput(); diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index e43b9773cc1e0..e09a218ccf83b 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -98,11 +98,11 @@ import org.opensearch.common.util.concurrent.RunOnce; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.set.Sets; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import 
org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.gateway.WriteStateException; import org.opensearch.core.index.Index; import org.opensearch.index.IndexModule; @@ -2171,7 +2171,7 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o shardId.getIndexName(), index.id(), index.source(), - XContentHelper.xContentType(index.source()), + MediaTypeRegistry.xContentType(index.source()), index.routing() ) ); diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index b574ffd1006c0..f4841a8ca9a28 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -82,12 +82,12 @@ import org.opensearch.common.util.iterable.Iterables; import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.lease.Releasable; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.core.util.FileSystemUtils; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.env.NodeEnvironment; @@ -1727,7 +1727,7 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set<String> resolvedExpressions) { CheckedFunction<BytesReference, QueryBuilder, IOException> filterParser = bytes -> { try ( InputStream inputStream = bytes.streamInput(); - XContentParser parser = XContentFactory.xContentType(inputStream) + XContentParser parser = MediaTypeRegistry.xContentType(inputStream) .xContent() .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, inputStream) ) { diff --git a/server/src/main/java/org/opensearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/server/src/main/java/org/opensearch/repositories/blobstore/ChecksumBlobStoreFormat.java index cb3d779ece4a9..a138bfc8a7044 100644 --- a/server/src/main/java/org/opensearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/server/src/main/java/org/opensearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -43,20 +43,20 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.common.CheckedFunction; import org.opensearch.common.blobstore.BlobContainer; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.compress.Compressor; import org.opensearch.common.io.Streams; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.lucene.store.ByteArrayIndexInput; import org.opensearch.common.lucene.store.IndexOutputOutputStream; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import 
org.opensearch.gateway.CorruptStateException; import org.opensearch.snapshots.SnapshotInfo; @@ -185,7 +185,7 @@ public void close() throws IOException { // in order to write the footer we need to prevent closing the actual index input. } }; - XContentBuilder builder = XContentFactory.contentBuilder( + XContentBuilder builder = MediaTypeRegistry.contentBuilder( XContentType.SMILE, compressor.threadLocalOutputStream(indexOutputOutputStream) ) diff --git a/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java b/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java index 32499b1fc155b..11a116e8c858d 100644 --- a/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java +++ b/server/src/main/java/org/opensearch/rest/AbstractRestChannel.java @@ -38,7 +38,6 @@ import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import java.io.IOException; import java.io.OutputStream; @@ -145,12 +144,7 @@ public XContentBuilder newBuilder(@Nullable MediaType requestContentType, @Nulla } OutputStream unclosableOutputStream = Streams.flushOnCloseStream(bytesOutput()); - XContentBuilder builder = new XContentBuilder( - XContentFactory.xContent(responseContentType), - unclosableOutputStream, - includes, - excludes - ); + XContentBuilder builder = new XContentBuilder(responseContentType.xContent(), unclosableOutputStream, includes, excludes); if (pretty) { builder.prettyPrint().lfAtEnd(); } diff --git a/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java index b4abe449e027f..ce5e12d6fd163 100644 --- a/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java +++ b/server/src/main/java/org/opensearch/rest/action/document/RestGetSourceAction.java @@ -38,8 +38,8 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; @@ -111,7 +111,7 @@ public RestResponse buildResponse(final GetResponse response) throws Exception { final XContentBuilder builder = channel.newBuilder(request.getMediaType(), false); final BytesReference source = response.getSourceInternal(); try (InputStream stream = source.streamInput()) { - builder.rawValue(stream, XContentHelper.xContentType(source)); + builder.rawValue(stream, MediaTypeRegistry.xContentType(source)); } return new BytesRestResponse(OK, builder); } diff --git a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestionBuilder.java b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestionBuilder.java index 6f3c3f471b47c..1276bc824c05b 100644 --- a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -38,11 +38,11 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import 
org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ObjectParser; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -111,7 +111,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder { // Copy the current structure. We will parse, once the mapping is provided - XContentBuilder builder = XContentFactory.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE); builder.copyCurrentStructure(p); v.contextBytes = BytesReference.bytes(builder); p.skipChildren(); @@ -216,7 +216,7 @@ public CompletionSuggestionBuilder regex(String regex, RegexOptions regexOptions public CompletionSuggestionBuilder contexts(Map<String, List<? extends ToXContent>> queryContexts) { Objects.requireNonNull(queryContexts, "contexts must not be null"); try { - XContentBuilder contentBuilder = XContentFactory.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(CONTEXT_BYTES_XCONTENT_TYPE); contentBuilder.startObject(); for (Map.Entry<String, List<? extends ToXContent>> contextEntry : queryContexts.entrySet()) { contentBuilder.startArray(contextEntry.getKey()); diff --git a/server/src/main/java/org/opensearch/search/suggest/phrase/PhraseSuggester.java b/server/src/main/java/org/opensearch/search/suggest/phrase/PhraseSuggester.java index 4b1c43bf7a317..6992af62b6626 100644 --- a/server/src/main/java/org/opensearch/search/suggest/phrase/PhraseSuggester.java +++ b/server/src/main/java/org/opensearch/search/suggest/phrase/PhraseSuggester.java @@ -44,7 +44,7 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.core.common.text.Text; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.AbstractQueryBuilder; import org.opensearch.index.query.ParsedQuery; @@ -160,7 +160,8 @@ public Suggestion<? extends Entry<? extends Option>> innerExecute( QueryShardContext shardContext = suggestion.getShardContext(); final String querySource = scriptFactory.newInstance(vars).execute(); try ( - XContentParser parser = XContentFactory.xContent(querySource) + XContentParser parser = MediaTypeRegistry.xContent(querySource) + .xContent() .createParser(shardContext.getXContentRegistry(), LoggingDeprecationHandler.INSTANCE, querySource) ) { QueryBuilder innerQueryBuilder = AbstractQueryBuilder.parseInnerQueryBuilder(parser); diff --git a/server/src/main/java/org/opensearch/tasks/RawTaskStatus.java b/server/src/main/java/org/opensearch/tasks/RawTaskStatus.java index 28a09fd80b408..041a5d32b027c 100644 --- a/server/src/main/java/org/opensearch/tasks/RawTaskStatus.java +++ b/server/src/main/java/org/opensearch/tasks/RawTaskStatus.java @@ -36,8 +36,8 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentHelper; import 
org.opensearch.common.xcontent.XContentType; import java.io.IOException; @@ -76,7 +76,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { try (InputStream stream = status.streamInput()) { - return builder.rawValue(stream, XContentHelper.xContentType(status)); + return builder.rawValue(stream, MediaTypeRegistry.xContentType(status)); } } diff --git a/server/src/main/java/org/opensearch/tasks/TaskResult.java b/server/src/main/java/org/opensearch/tasks/TaskResult.java index b6006636da2e9..34efbe3ec572b 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskResult.java +++ b/server/src/main/java/org/opensearch/tasks/TaskResult.java @@ -42,10 +42,10 @@ import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.core.xcontent.InstantiatingObjectParser; import org.opensearch.common.xcontent.ObjectParserHelper; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; @@ -238,7 +238,7 @@ public int hashCode() { } private static BytesReference toXContent(Exception error) throws IOException { - try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { builder.startObject(); OpenSearchException.generateThrowableXContent(builder, ToXContent.EMPTY_PARAMS, error); builder.endObject(); diff --git a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java index 1feb115cb585a..01be4eaaaf732 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java @@ -54,9 +54,9 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.util.io.Streams; import org.opensearch.threadpool.ThreadPool; @@ -169,7 +169,7 @@ private int getTaskResultMappingVersion(IndexMetadata metadata) { private void doStoreResult(TaskResult taskResult, ActionListener listener) { IndexRequestBuilder index = client.prepareIndex(TASK_INDEX).setId(taskResult.getTask().getTaskId().toString()); - try (XContentBuilder builder = XContentFactory.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(Requests.INDEX_CONTENT_TYPE)) { taskResult.toXContent(builder, ToXContent.EMPTY_PARAMS); index.setSource(builder); } catch (IOException e) { diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 6885278b08413..c463f7a8f4b8c 100644 --- 
a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -56,9 +56,9 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.tasks.Task; @@ -761,7 +761,7 @@ public void testTasksToXContentGrouping() throws Exception { } private Map serialize(ListTasksResponse response, boolean byParents) throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject(); if (byParents) { DiscoveryNodes nodes = testNodes[0].clusterService.state().nodes(); diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index e92096f139d17..c9b57d0c41b45 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -46,10 +46,10 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.network.NetworkModule; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.admin.cluster.RestClusterRerouteAction; @@ -212,7 +212,7 @@ private ClusterRerouteRequest roundTripThroughRestRequest(ClusterRerouteRequest private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOException { Map params = new HashMap<>(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); boolean hasBody = false; if (randomBoolean()) { builder.prettyPrint(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java b/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java index 265f02304ce00..2c4462b964d65 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java @@ -37,9 +37,9 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import 
org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -195,7 +195,7 @@ public void testParseAdd() throws IOException { if (filter == null || filter.isEmpty()) { assertNull(action.filter()); } else { - assertEquals(Strings.toString(XContentFactory.contentBuilder(XContentType.JSON).map(filter)), action.filter()); + assertEquals(Strings.toString(MediaTypeRegistry.contentBuilder(XContentType.JSON).map(filter)), action.filter()); } assertEquals(Objects.toString(searchRouting, null), action.searchRouting()); assertEquals(Objects.toString(indexRouting, null), action.indexRouting()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index e4114a622602c..c8a24fd0482cf 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -38,9 +38,9 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; @@ -101,7 +101,7 @@ public void testMappingKeyedByType() throws IOException { CreateIndexRequest request1 = new CreateIndexRequest("foo"); CreateIndexRequest request2 = new CreateIndexRequest("bar"); { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject() .startObject("properties") .startObject("field1") @@ -117,7 +117,7 @@ public void testMappingKeyedByType() throws IOException { .endObject() .endObject(); request1.mapping(builder); - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject() .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") @@ -140,7 +140,7 @@ public void testMappingKeyedByType() throws IOException { } public void testSettingsType() throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject().startArray("settings").endArray().endObject(); CreateIndexRequest parsedCreateIndexRequest = new CreateIndexRequest(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java index 4b23a68f8b383..9de9e81ee9cae 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -43,6 +43,7 @@ import 
org.opensearch.common.unit.ByteSizeUnit; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; @@ -179,7 +180,7 @@ public void testSerialize() throws Exception { public void testUnknownFields() throws IOException { final RolloverRequest request = new RolloverRequest(); XContentType xContentType = randomFrom(XContentType.values()); - final XContentBuilder builder = XContentFactory.contentBuilder(xContentType); + final XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType); builder.startObject(); { builder.startObject("conditions"); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java index dbca883506b0d..f069ff3bf7ff2 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -35,6 +35,7 @@ import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; @@ -76,7 +77,7 @@ public void testMappingKeyedByType() throws IOException { PutIndexTemplateRequest request1 = new PutIndexTemplateRequest("foo"); PutIndexTemplateRequest request2 = new PutIndexTemplateRequest("bar"); { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject() .startObject("properties") .startObject("field1") @@ -92,7 +93,7 @@ public void testMappingKeyedByType() throws IOException { .endObject() .endObject(); request1.mapping(builder); - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject() .startObject("properties") .startObject("field1") diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java index c674be8dbba38..2f2c9b2be9e50 100644 --- a/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkRequestTests.java @@ -39,14 +39,14 @@ import org.opensearch.action.support.WriteRequest.RefreshPolicy; import org.opensearch.action.update.UpdateRequest; import org.opensearch.client.Requests; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.ParsingException; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; 
-import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.script.Script; import org.opensearch.test.OpenSearchTestCase; @@ -340,7 +340,7 @@ public void testSmileIsSupported() throws IOException { XContentType xContentType = XContentType.SMILE; BytesReference data; try (BytesStreamOutput out = new BytesStreamOutput()) { - try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType, out)) { builder.startObject(); builder.startObject("index"); builder.field("_index", "index"); @@ -349,7 +349,7 @@ public void testSmileIsSupported() throws IOException { builder.endObject(); } out.write(xContentType.xContent().streamSeparator()); - try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType, out)) { builder.startObject(); builder.field("field", "value"); builder.endObject(); @@ -375,7 +375,7 @@ public void testToValidateUpsertRequestAndCASInBulkRequest() throws IOException XContentType xContentType = XContentType.SMILE; BytesReference data; try (BytesStreamOutput out = new BytesStreamOutput()) { - try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType, out)) { builder.startObject(); builder.startObject("update"); builder.field("_index", "index"); @@ -386,7 +386,7 @@ public void testToValidateUpsertRequestAndCASInBulkRequest() throws IOException builder.endObject(); } out.write(xContentType.xContent().streamSeparator()); - try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType, out)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType, out)) { builder.startObject(); builder.startObject("doc").endObject(); Map values = new HashMap<>(); diff --git a/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java index 866e13c7faf0e..14f2a5c94bcd0 100644 --- a/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/opensearch/action/explain/ExplainResponseTests.java @@ -37,9 +37,9 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.document.DocumentField; import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.get.GetResult; @@ -114,7 +114,7 @@ public void testToXContent() throws IOException { ); ExplainResponse response = new ExplainResponse(index, id, exist, explanation, getResult); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); response.toXContent(builder, ToXContent.EMPTY_PARAMS); String generatedResponse = BytesReference.bytes(builder).utf8ToString().replaceAll("\\s+", ""); diff --git a/server/src/test/java/org/opensearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java 
b/server/src/test/java/org/opensearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java index 382425ce4e57c..d76ed86ed490e 100644 --- a/server/src/test/java/org/opensearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/opensearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java @@ -34,9 +34,9 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.AbstractSerializingTestCase; @@ -125,7 +125,7 @@ protected Predicate getRandomFieldsExcludeFilter() { public void testToXContent() throws IOException { FieldCapabilitiesResponse response = createSimpleResponse(); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); response.toXContent(builder, ToXContent.EMPTY_PARAMS); String generatedResponse = BytesReference.bytes(builder).utf8ToString(); diff --git a/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java b/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java index 5498d685dbb6c..cc1e963bc110d 100644 --- a/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java +++ b/server/src/test/java/org/opensearch/action/get/MultiGetRequestTests.java @@ -126,7 +126,7 @@ public void testXContentSerialization() throws IOException { MultiGetRequest expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + try (XContentParser parser = createParser(xContentType.xContent(), shuffled)) { MultiGetRequest actual = new MultiGetRequest(); actual.add(null, null, null, null, parser, true); assertThat(parser.nextToken(), nullValue()); diff --git a/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java b/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java index 71ada43bef116..c2a5f2def31ab 100644 --- a/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java +++ b/server/src/test/java/org/opensearch/action/get/MultiGetResponseTests.java @@ -33,7 +33,6 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.get.GetResult; @@ -53,7 +52,7 @@ public void testFromXContent() throws IOException { XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); MultiGetResponse parsed; - try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + try (XContentParser parser = createParser(xContentType.xContent(), shuffled)) { parsed = MultiGetResponse.fromXContent(parser); assertNull(parser.nextToken()); } diff --git 
a/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java index 0e25091d845b5..fafd34602c310 100644 --- a/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java +++ b/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java @@ -38,10 +38,10 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent.MapParams; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; @@ -422,7 +422,7 @@ public void testFromXContentWithWildcardSpecialValues() throws IOException { final boolean allowNoIndices = randomBoolean(); BytesReference xContentBytes; - try (XContentBuilder builder = XContentFactory.contentBuilder(type)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(type)) { builder.startObject(); builder.field("expand_wildcards", "all"); builder.field("ignore_unavailable", ignoreUnavailable); @@ -441,7 +441,7 @@ public void testFromXContentWithWildcardSpecialValues() throws IOException { assertTrue(fromXContentOptions.expandWildcardsHidden()); assertTrue(fromXContentOptions.expandWildcardsOpen()); - try (XContentBuilder builder = XContentFactory.contentBuilder(type)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(type)) { builder.startObject(); builder.field("expand_wildcards", "none"); builder.field("ignore_unavailable", ignoreUnavailable); @@ -461,7 +461,7 @@ public void testFromXContentWithWildcardSpecialValues() throws IOException { } private BytesReference toXContentBytes(IndicesOptions indicesOptions, XContentType type) throws IOException { - try (XContentBuilder builder = XContentFactory.contentBuilder(type)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(type)) { builder.startObject(); indicesOptions.toXContent(builder, new MapParams(Collections.emptyMap())); builder.endObject(); diff --git a/server/src/test/java/org/opensearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/server/src/test/java/org/opensearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 417a79da68ae1..4968f7740b7c9 100644 --- a/server/src/test/java/org/opensearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/server/src/test/java/org/opensearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -37,9 +37,9 @@ import org.opensearch.core.common.io.stream.NamedWriteable; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.Writeable.Reader; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; @@ -86,7 +86,7 @@ public static void afterClass() throws Exception { public void testFromXContent() throws IOException { for (int runs = 0; runs < 
NUMBER_OF_TESTBUILDERS; runs++) { SB testShape = createTestShapeBuilder(); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { contentBuilder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java index 274b6477e3fbd..00ab96dab6a5c 100644 --- a/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java @@ -49,6 +49,7 @@ import org.opensearch.common.unit.DistanceUnit; import org.opensearch.common.util.CollectionUtils; import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedObjectNotFoundException; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; @@ -865,7 +866,7 @@ public void testBasics() throws IOException { generator.writeEndObject(); } byte[] data = os.toByteArray(); - assertEquals(xcontentType(), XContentFactory.xContentType(data)); + assertEquals(xcontentType(), MediaTypeRegistry.xContent(data)); } public void testMissingEndObject() throws IOException { diff --git a/server/src/test/java/org/opensearch/common/xcontent/XContentFactoryTests.java b/server/src/test/java/org/opensearch/common/xcontent/XContentFactoryTests.java index 866def49af991..1b7c535a550b0 100644 --- a/server/src/test/java/org/opensearch/common/xcontent/XContentFactoryTests.java +++ b/server/src/test/java/org/opensearch/common/xcontent/XContentFactoryTests.java @@ -37,6 +37,7 @@ import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; @@ -64,14 +65,14 @@ public void testGuessCbor() throws IOException { } private void testGuessType(XContentType type) throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(type); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(type); builder.startObject(); builder.field("field1", "value1"); builder.endObject(); final BytesReference bytes; if (type == XContentType.JSON && randomBoolean()) { - final int length = randomIntBetween(0, 8 * XContentFactory.GUESS_HEADER_LENGTH); + final int length = randomIntBetween(0, 8 * MediaTypeRegistry.GUESS_HEADER_LENGTH); final String content = Strings.toString(builder); final StringBuilder sb = new StringBuilder(length + content.length()); final char[] chars = new char[length]; @@ -82,24 +83,24 @@ private void testGuessType(XContentType type) throws IOException { bytes = BytesReference.bytes(builder); } - assertThat(XContentHelper.xContentType(bytes), equalTo(type)); - assertThat(XContentFactory.xContentType(bytes.streamInput()), equalTo(type)); + assertThat(MediaTypeRegistry.xContentType(bytes), equalTo(type)); + assertThat(MediaTypeRegistry.xContentType(bytes.streamInput()), equalTo(type)); // CBOR is binary, cannot use String if (type != XContentType.CBOR && type != XContentType.SMILE) { - assertThat(XContentFactory.xContentType(Strings.toString(builder)), equalTo(type)); + 
assertThat(MediaTypeRegistry.xContentType(Strings.toString(builder)), equalTo(type)); } } public void testCBORBasedOnMajorObjectDetection() { // for this {"f "=> 5} perl encoder for example generates: byte[] bytes = new byte[] { (byte) 0xA1, (byte) 0x43, (byte) 0x66, (byte) 6f, (byte) 6f, (byte) 0x5 }; - assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.CBOR)); + assertThat(MediaTypeRegistry.xContent(bytes), equalTo(XContentType.CBOR)); // assertThat(((Number) XContentHelper.convertToMap(bytes, true).v2().get("foo")).intValue(), equalTo(5)); // this if for {"foo" : 5} in python CBOR bytes = new byte[] { (byte) 0xA1, (byte) 0x63, (byte) 0x66, (byte) 0x6f, (byte) 0x6f, (byte) 0x5 }; - assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.CBOR)); + assertThat(MediaTypeRegistry.xContent(bytes), equalTo(XContentType.CBOR)); assertThat(((Number) XContentHelper.convertToMap(new BytesArray(bytes), true).v2().get("foo")).intValue(), equalTo(5)); // also make sure major type check doesn't collide with SMILE and JSON, just in case @@ -111,36 +112,36 @@ public void testCBORBasedOnMajorObjectDetection() { public void testCBORBasedOnMagicHeaderDetection() { byte[] bytes = new byte[] { (byte) 0xd9, (byte) 0xd9, (byte) 0xf7 }; - assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.CBOR)); + assertThat(MediaTypeRegistry.xContent(bytes), equalTo(XContentType.CBOR)); } public void testEmptyStream() throws Exception { ByteArrayInputStream is = new ByteArrayInputStream(new byte[0]); - assertNull(XContentFactory.xContentType(is)); + assertNull(MediaTypeRegistry.xContentType(is)); is = new ByteArrayInputStream(new byte[] { (byte) 1 }); - assertNull(XContentFactory.xContentType(is)); + assertNull(MediaTypeRegistry.xContentType(is)); } public void testInvalidStream() throws Exception { byte[] bytes = new byte[] { (byte) '"' }; - assertNull(XContentFactory.xContentType(bytes)); + assertNull(MediaTypeRegistry.mediaTypeFromBytes(bytes, 0, bytes.length)); bytes = new byte[] { (byte) 'x' }; - assertNull(XContentFactory.xContentType(bytes)); + assertNull(MediaTypeRegistry.mediaTypeFromBytes(bytes, 0, bytes.length)); } public void testJsonFromBytesOptionallyPrecededByUtf8Bom() throws Exception { byte[] bytes = new byte[] { (byte) '{', (byte) '}' }; - assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); + assertThat(MediaTypeRegistry.xContent(bytes), equalTo(XContentType.JSON)); bytes = new byte[] { (byte) 0x20, (byte) '{', (byte) '}' }; - assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); + assertThat(MediaTypeRegistry.xContent(bytes), equalTo(XContentType.JSON)); bytes = new byte[] { (byte) 0xef, (byte) 0xbb, (byte) 0xbf, (byte) '{', (byte) '}' }; - assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); + assertThat(MediaTypeRegistry.xContent(bytes), equalTo(XContentType.JSON)); bytes = new byte[] { (byte) 0xef, (byte) 0xbb, (byte) 0xbf, (byte) 0x20, (byte) '{', (byte) '}' }; - assertThat(XContentFactory.xContentType(bytes), equalTo(XContentType.JSON)); + assertThat(MediaTypeRegistry.xContent(bytes), equalTo(XContentType.JSON)); } } diff --git a/server/src/test/java/org/opensearch/common/xcontent/builder/XContentBuilderTests.java b/server/src/test/java/org/opensearch/common/xcontent/builder/XContentBuilderTests.java index 6c97297ce43e7..dc98000d93899 100644 --- a/server/src/test/java/org/opensearch/common/xcontent/builder/XContentBuilderTests.java +++ 
b/server/src/test/java/org/opensearch/common/xcontent/builder/XContentBuilderTests.java @@ -39,13 +39,13 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.io.PathUtils; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentOpenSearchExtension; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.core.xcontent.XContentGenerator; -import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentGenerator; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.test.OpenSearchTestCase; import java.io.ByteArrayOutputStream; @@ -68,7 +68,7 @@ public class XContentBuilderTests extends OpenSearchTestCase { public void testPrettyWithLfAtEnd() throws Exception { ByteArrayOutputStream os = new ByteArrayOutputStream(); - XContentGenerator generator = XContentFactory.xContent(XContentType.JSON).createGenerator(os); + XContentGenerator generator = XContentType.JSON.xContent().createGenerator(os); generator.usePrettyPrint(); generator.usePrintLineFeedAtEnd(); @@ -87,7 +87,7 @@ public void testPrettyWithLfAtEnd() throws Exception { public void testReuseJsonGenerator() throws Exception { ByteArrayOutputStream os = new ByteArrayOutputStream(); - XContentGenerator generator = XContentFactory.xContent(XContentType.JSON).createGenerator(os); + XContentGenerator generator = XContentType.JSON.xContent().createGenerator(os); generator.writeStartObject(); generator.writeStringField("test", "value"); generator.writeEndObject(); @@ -107,14 +107,14 @@ public void testReuseJsonGenerator() throws Exception { public void testRaw() throws IOException { { - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); xContentBuilder.endObject(); assertThat(Strings.toString(xContentBuilder), equalTo("{\"foo\":{\"test\":\"value\"}}")); } { - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}").streamInput()); @@ -122,7 +122,7 @@ public void testRaw() throws IOException { assertThat(Strings.toString(xContentBuilder), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); } { - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.field("test", "value"); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); @@ -130,7 +130,7 @@ public void testRaw() throws IOException { assertThat(Strings.toString(xContentBuilder), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); } { - XContentBuilder xContentBuilder = 
XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.field("test", "value"); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); @@ -142,7 +142,7 @@ public void testRaw() throws IOException { ); } { - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); xContentBuilder.startObject(); xContentBuilder.field("test", "value"); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}").streamInput()); @@ -157,17 +157,17 @@ public void testRaw() throws IOException { } public void testSimpleGenerator() throws Exception { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject().field("test", "value").endObject(); assertThat(Strings.toString(builder), equalTo("{\"test\":\"value\"}")); - builder = XContentFactory.contentBuilder(XContentType.JSON); + builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject().field("test", "value").endObject(); assertThat(Strings.toString(builder), equalTo("{\"test\":\"value\"}")); } public void testOverloadedList() throws Exception { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject().field("test", Arrays.asList("1", "2")).endObject(); assertThat(Strings.toString(builder), equalTo("{\"test\":[\"1\",\"2\"]}")); } @@ -175,7 +175,7 @@ public void testOverloadedList() throws Exception { public void testWritingBinaryToStream() throws Exception { BytesStreamOutput bos = new BytesStreamOutput(); - XContentGenerator gen = XContentFactory.xContent(XContentType.JSON).createGenerator(bos); + XContentGenerator gen = XContentType.JSON.xContent().createGenerator(bos); gen.writeStartObject(); gen.writeStringField("name", "something"); gen.flush(); @@ -189,7 +189,7 @@ public void testWritingBinaryToStream() throws Exception { } public void testByteConversion() throws Exception { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject().field("test_name", (Byte) (byte) 120).endObject(); assertThat(BytesReference.bytes(builder).utf8ToString(), equalTo("{\"test_name\":120}")); } @@ -199,21 +199,21 @@ public void testDateTypesConversion() throws Exception { String expectedDate = XContentOpenSearchExtension.DEFAULT_DATE_PRINTER.print(date.getTime()); Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT); String expectedCalendar = XContentOpenSearchExtension.DEFAULT_DATE_PRINTER.print(calendar.getTimeInMillis()); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject().timeField("date", date).endObject(); assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}")); - builder = XContentFactory.contentBuilder(XContentType.JSON); + builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject().field("calendar", calendar).endObject(); assertThat(Strings.toString(builder), equalTo("{\"calendar\":\"" + 
expectedCalendar + "\"}")); - builder = XContentFactory.contentBuilder(XContentType.JSON); + builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); Map map = new HashMap<>(); map.put("date", date); builder.map(map); assertThat(Strings.toString(builder), equalTo("{\"date\":\"" + expectedDate + "\"}")); - builder = XContentFactory.contentBuilder(XContentType.JSON); + builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); map = new HashMap<>(); map.put("calendar", calendar); builder.map(map); @@ -221,7 +221,7 @@ public void testDateTypesConversion() throws Exception { } public void testCopyCurrentStructure() throws Exception { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject().field("test", "test field").startObject("filter").startObject("terms"); // up to 20k random terms @@ -248,7 +248,7 @@ public void testCopyCurrentStructure() throws Exception { } } else if (token == XContentParser.Token.START_OBJECT) { if ("filter".equals(currentFieldName)) { - filterBuilder = XContentFactory.contentBuilder(parser.contentType()); + filterBuilder = MediaTypeRegistry.contentBuilder(parser.contentType()); filterBuilder.copyCurrentStructure(parser); } } @@ -288,10 +288,10 @@ public void testHandlingOfPath_absolute() throws IOException { } private void checkPathSerialization(Path path) throws IOException { - XContentBuilder pathBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder pathBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); pathBuilder.startObject().field("file", path).endObject(); - XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder stringBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); stringBuilder.startObject().field("file", path.toString()).endObject(); assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder))); @@ -301,10 +301,10 @@ public void testHandlingOfPath_StringName() throws IOException { Path path = PathUtils.get("path"); String name = new String("file"); - XContentBuilder pathBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder pathBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); pathBuilder.startObject().field(name, path).endObject(); - XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder stringBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); stringBuilder.startObject().field(name, path.toString()).endObject(); assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder))); @@ -313,36 +313,36 @@ public void testHandlingOfPath_StringName() throws IOException { public void testHandlingOfCollectionOfPaths() throws IOException { Path path = PathUtils.get("path"); - XContentBuilder pathBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder pathBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); pathBuilder.startObject().field("file", Arrays.asList(path)).endObject(); - XContentBuilder stringBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder stringBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); stringBuilder.startObject().field("file", Arrays.asList(path.toString())).endObject(); assertThat(Strings.toString(pathBuilder), equalTo(Strings.toString(stringBuilder))); } public void testIndentIsPlatformIndependent() throws 
IOException { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON).prettyPrint(); builder.startObject().field("test", "foo").startObject("foo").field("foobar", "boom").endObject().endObject(); String string = Strings.toString(builder); assertEquals("{\n" + " \"test\" : \"foo\",\n" + " \"foo\" : {\n" + " \"foobar\" : \"boom\"\n" + " }\n" + "}", string); - builder = XContentFactory.contentBuilder(XContentType.YAML).prettyPrint(); + builder = MediaTypeRegistry.contentBuilder(XContentType.YAML).prettyPrint(); builder.startObject().field("test", "foo").startObject("foo").field("foobar", "boom").endObject().endObject(); string = Strings.toString(builder); assertEquals("---\n" + "test: \"foo\"\n" + "foo:\n" + " foobar: \"boom\"\n", string); } public void testRenderGeoPoint() throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON).prettyPrint(); builder.startObject().field("foo").value(new GeoPoint(1, 2)).endObject(); String string = Strings.toString(builder); assertEquals("{\n" + " \"foo\" : {\n" + " \"lat\" : 1.0,\n" + " \"lon\" : 2.0\n" + " }\n" + "}", string.trim()); } public void testWriteMapWithNullKeys() throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); try { builder.map(Collections.singletonMap(null, "test")); fail("write map should have failed"); @@ -352,7 +352,7 @@ public void testWriteMapWithNullKeys() throws IOException { } public void testWriteMapValueWithNullKeys() throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); try { builder.map(Collections.singletonMap(null, "test")); fail("write map should have failed"); @@ -362,7 +362,7 @@ public void testWriteMapValueWithNullKeys() throws IOException { } public void testWriteFieldMapWithNullKeys() throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); try { builder.startObject(); builder.field("map", Collections.singletonMap(null, "test")); @@ -374,7 +374,7 @@ public void testWriteFieldMapWithNullKeys() throws IOException { public void testMissingEndObject() throws IOException { IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - try (XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()))) { builder.startObject(); builder.field("foo", true); } @@ -385,7 +385,7 @@ public void testMissingEndObject() throws IOException { public void testMissingEndArray() throws IOException { IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - try (XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()))) { builder.startObject(); builder.startArray("foo"); builder.value(0); diff --git 
a/server/src/test/java/org/opensearch/common/xcontent/cbor/JsonVsCborTests.java b/server/src/test/java/org/opensearch/common/xcontent/cbor/JsonVsCborTests.java index e304798a0ff0c..89add6ea78722 100644 --- a/server/src/test/java/org/opensearch/common/xcontent/cbor/JsonVsCborTests.java +++ b/server/src/test/java/org/opensearch/common/xcontent/cbor/JsonVsCborTests.java @@ -33,7 +33,6 @@ package org.opensearch.common.xcontent.cbor; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentGenerator; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -48,10 +47,10 @@ public class JsonVsCborTests extends OpenSearchTestCase { public void testCompareParsingTokens() throws IOException { BytesStreamOutput xsonOs = new BytesStreamOutput(); - XContentGenerator xsonGen = XContentFactory.xContent(XContentType.CBOR).createGenerator(xsonOs); + XContentGenerator xsonGen = XContentType.CBOR.xContent().createGenerator(xsonOs); BytesStreamOutput jsonOs = new BytesStreamOutput(); - XContentGenerator jsonGen = XContentFactory.xContent(XContentType.JSON).createGenerator(jsonOs); + XContentGenerator jsonGen = XContentType.JSON.xContent().createGenerator(jsonOs); xsonGen.writeStartObject(); jsonGen.writeStartObject(); diff --git a/server/src/test/java/org/opensearch/common/xcontent/smile/JsonVsSmileTests.java b/server/src/test/java/org/opensearch/common/xcontent/smile/JsonVsSmileTests.java index 4f26bdc7e9c26..a96031f2a1dad 100644 --- a/server/src/test/java/org/opensearch/common/xcontent/smile/JsonVsSmileTests.java +++ b/server/src/test/java/org/opensearch/common/xcontent/smile/JsonVsSmileTests.java @@ -33,7 +33,6 @@ package org.opensearch.common.xcontent.smile; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentGenerator; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -48,10 +47,10 @@ public class JsonVsSmileTests extends OpenSearchTestCase { public void testCompareParsingTokens() throws IOException { BytesStreamOutput xsonOs = new BytesStreamOutput(); - XContentGenerator xsonGen = XContentFactory.xContent(XContentType.SMILE).createGenerator(xsonOs); + XContentGenerator xsonGen = XContentType.SMILE.xContent().createGenerator(xsonOs); BytesStreamOutput jsonOs = new BytesStreamOutput(); - XContentGenerator jsonGen = XContentFactory.xContent(XContentType.JSON).createGenerator(jsonOs); + XContentGenerator jsonGen = XContentType.JSON.xContent().createGenerator(jsonOs); xsonGen.writeStartObject(); jsonGen.writeStartObject(); diff --git a/server/src/test/java/org/opensearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java b/server/src/test/java/org/opensearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java index c318dd89f573c..0a8df4406b5cd 100644 --- a/server/src/test/java/org/opensearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java +++ b/server/src/test/java/org/opensearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java @@ -39,7 +39,6 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; 
import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.support.AbstractFilteringTestCase; @@ -89,7 +88,7 @@ static void assertXContentBuilderAsString(final XContentBuilder expected, final } static void assertXContentBuilderAsBytes(final XContentBuilder expected, final XContentBuilder actual) { - XContent xContent = XContentFactory.xContent(actual.contentType()); + XContent xContent = actual.contentType().xContent(); try ( XContentParser jsonParser = xContent.createParser( NamedXContentRegistry.EMPTY, diff --git a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java index 518e5e880f4f8..9e467dca17409 100644 --- a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java @@ -38,7 +38,7 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; @@ -75,7 +75,7 @@ public void testNoFormat() throws Exception { ) ); - assertThat(XContentFactory.xContentType(doc.source().toBytesRef().bytes), equalTo(XContentType.JSON)); + assertThat(MediaTypeRegistry.xContent(doc.source().toBytesRef().bytes), equalTo(XContentType.JSON)); documentMapper = parser.parse("type", new CompressedXContent(mapping)); doc = documentMapper.parse( @@ -87,7 +87,7 @@ public void testNoFormat() throws Exception { ) ); - assertThat(XContentHelper.xContentType(doc.source()), equalTo(XContentType.SMILE)); + assertThat(MediaTypeRegistry.xContentType(doc.source()), equalTo(XContentType.SMILE)); } public void testIncludes() throws Exception { diff --git a/server/src/test/java/org/opensearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/BoolQueryBuilderTests.java index 92efd19732ef0..54afde41daa5b 100644 --- a/server/src/test/java/org/opensearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/BoolQueryBuilderTests.java @@ -37,8 +37,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -181,7 +181,7 @@ public void testIllegalArguments() { // https://github.com/elastic/elasticsearch/issues/7240 public void testEmptyBooleanQuery() throws Exception { - XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); contentBuilder.startObject().startObject("bool").endObject().endObject(); try (XContentParser xParser = createParser(contentBuilder)) { Query parsedQuery = parseQuery(xParser).toQuery(createShardContext()); diff --git 
a/server/src/test/java/org/opensearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/opensearch/index/query/InnerHitBuilderTests.java index cb75889a0aa24..5e6226561582e 100644 --- a/server/src/test/java/org/opensearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/InnerHitBuilderTests.java @@ -34,9 +34,9 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -133,7 +133,7 @@ public void testSerializationOrder() throws Exception { public void testFromAndToXContent() throws Exception { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { InnerHitBuilder innerHit = randomInnerHits(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS); // fields is printed out as an object but parsed into a List where order matters, we disable shuffling XContentBuilder shuffled = shuffleXContent(builder, "fields"); diff --git a/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java b/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java index 0f957517d48e1..200688f35352c 100644 --- a/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java +++ b/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java @@ -36,9 +36,9 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; @@ -73,7 +73,7 @@ public void testToFromXContent() throws IOException { ); ByteSizeValue size = new ByteSizeValue(Math.abs(randomLong())); BlobStoreIndexShardSnapshot.FileInfo info = new BlobStoreIndexShardSnapshot.FileInfo("_foobar", meta, size); - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON).prettyPrint(); BlobStoreIndexShardSnapshot.FileInfo.toXContent(info, builder, ToXContent.EMPTY_PARAMS); byte[] xcontent = BytesReference.toBytes(BytesReference.bytes(shuffleXContent(builder))); @@ -126,7 +126,7 @@ public void testInvalidFieldsInFromXContent() throws IOException { fail("shouldn't be here"); } - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject(); builder.field(FileInfo.NAME, name); builder.field(FileInfo.PHYSICAL_NAME, physicalName); diff --git 
a/server/src/test/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshotTests.java b/server/src/test/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshotTests.java index ccb89ec3d1547..eb2006124662a 100644 --- a/server/src/test/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshotTests.java +++ b/server/src/test/java/org/opensearch/index/snapshots/blobstore/RemoteStoreShardShallowCopySnapshotTests.java @@ -10,9 +10,9 @@ import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; @@ -177,7 +177,7 @@ public void testFromXContentInvalid() throws IOException { fail("shouldn't be here"); } - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.startObject(); builder.field(RemoteStoreShardShallowCopySnapshot.VERSION, version); builder.field(RemoteStoreShardShallowCopySnapshot.NAME, snapshot); diff --git a/server/src/test/java/org/opensearch/ingest/IngestMetadataTests.java b/server/src/test/java/org/opensearch/ingest/IngestMetadataTests.java index fdaf6d145da64..82e6e1c9ff450 100644 --- a/server/src/test/java/org/opensearch/ingest/IngestMetadataTests.java +++ b/server/src/test/java/org/opensearch/ingest/IngestMetadataTests.java @@ -36,9 +36,9 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; @@ -68,7 +68,7 @@ public void testFromXContent() throws IOException { map.put(pipeline.getId(), pipeline); map.put(pipeline2.getId(), pipeline2); IngestMetadata ingestMetadata = new IngestMetadata(map); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.prettyPrint(); builder.startObject(); ingestMetadata.toXContent(builder, ToXContent.EMPTY_PARAMS); diff --git a/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java b/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java index 7fae4d5bb23e3..56e2ae56fdc71 100644 --- a/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java +++ b/server/src/test/java/org/opensearch/persistent/PersistentTasksCustomMetadataTests.java @@ -54,7 +54,6 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import 
org.opensearch.persistent.PersistentTasksCustomMetadata.Assignment;
@@ -204,7 +203,7 @@ public void testSerializationContext() throws Exception {
BytesReference shuffled = toShuffledXContent(testInstance, xContentType, params, false);
PersistentTasksCustomMetadata newInstance;
- try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) {
+ try (XContentParser parser = createParser(xContentType.xContent(), shuffled)) {
newInstance = doParseInstance(parser);
}
assertNotSame(newInstance, testInstance);
diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetAliasesActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetAliasesActionTests.java
index 9739419406851..1fbc628ed7f2c 100644
--- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetAliasesActionTests.java
+++ b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetAliasesActionTests.java
@@ -33,8 +33,8 @@ package org.opensearch.rest.action.admin.indices;
import org.opensearch.cluster.metadata.AliasMetadata;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.rest.RestResponse;
import org.opensearch.test.OpenSearchTestCase;
@@ -60,7 +60,7 @@ public class RestGetAliasesActionTests extends OpenSearchTestCase {
// }'
public void testBareRequest() throws Exception {
- final XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
+ final XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
final Map<String, List<AliasMetadata>> openMapBuilder = new HashMap<>();
final AliasMetadata foobarAliasMetadata = AliasMetadata.builder("foobar").build();
final AliasMetadata fooAliasMetadata = AliasMetadata.builder("foo").build();
@@ -72,7 +72,7 @@ public void testBareRequest() throws Exception {
}
public void testSimpleAliasWildcardMatchingNothing() throws Exception {
- final XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
+ final XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
final Map<String, List<AliasMetadata>> openMapBuilder = new HashMap<>();
final RestResponse restResponse = RestGetAliasesAction.buildRestResponse(
true,
@@ -86,7 +86,7 @@ public void testSimpleAliasWildcardMatchingNothing() throws Exception {
}
public void testMultipleAliasWildcardsSomeMatching() throws Exception {
- final XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
+ final XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
final Map<String, List<AliasMetadata>> openMapBuilder = new HashMap<>();
final AliasMetadata aliasMetadata = AliasMetadata.builder("foobar").build();
openMapBuilder.put("index", Arrays.asList(aliasMetadata));
@@ -102,7 +102,7 @@ public void testMultipleAliasWildcardsSomeMatching() throws Exception {
}
public void testAliasWildcardsIncludeAndExcludeAll() throws Exception {
- final XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
+ final XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
final Map<String, List<AliasMetadata>> openMapBuilder = new HashMap<>();
final RestResponse restResponse = RestGetAliasesAction.buildRestResponse(
true,
@@ -116,7 +116,7 @@ public void testAliasWildcardsIncludeAndExcludeAll() throws Exception {
}
public void testAliasWildcardsIncludeAndExcludeSome() throws Exception {
- final XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
+ final XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
final Map<String, List<AliasMetadata>> openMapBuilder = new HashMap<>();
final AliasMetadata aliasMetadata = AliasMetadata.builder("foo").build();
openMapBuilder.put("index", Arrays.asList(aliasMetadata));
@@ -132,7 +132,7 @@ public void testAliasWildcardsIncludeAndExcludeSome() throws Exception {
}
public void testAliasWildcardsIncludeAndExcludeSomeAndExplicitMissing() throws Exception {
- final XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
+ final XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
final Map<String, List<AliasMetadata>> openMapBuilder = new HashMap<>();
final AliasMetadata aliasMetadata = AliasMetadata.builder("foo").build();
openMapBuilder.put("index", Arrays.asList(aliasMetadata));
@@ -153,7 +153,7 @@ public void testAliasWildcardsIncludeAndExcludeSomeAndExplicitMissing() throws E
}
public void testAliasWildcardsExcludeExplicitMissing() throws Exception {
- final XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
+ final XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON);
final Map<String, List<AliasMetadata>> openMapBuilder = new HashMap<>();
final RestResponse restResponse = RestGetAliasesAction.buildRestResponse(
true,
diff --git a/server/src/test/java/org/opensearch/script/ScriptTests.java b/server/src/test/java/org/opensearch/script/ScriptTests.java
index 47b4557c3e5e1..bb74bd2e5f6b0 100644
--- a/server/src/test/java/org/opensearch/script/ScriptTests.java
+++ b/server/src/test/java/org/opensearch/script/ScriptTests.java
@@ -37,6 +37,7 @@ import org.opensearch.core.common.io.stream.InputStreamStreamInput;
import org.opensearch.core.common.io.stream.OutputStreamStreamOutput;
import org.opensearch.common.settings.Settings;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.ToXContent;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -58,7 +59,7 @@ public class ScriptTests extends OpenSearchTestCase {
public void testScriptParsing() throws IOException {
Script expectedScript = createScript();
- try (XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) {
+ try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()))) {
expectedScript.toXContent(builder, ToXContent.EMPTY_PARAMS);
try (XContentParser parser = createParser(builder)) {
Script actualScript = Script.parse(parser);
@@ -103,7 +104,7 @@ private Script createScript() throws IOException {
public void testParse() throws IOException {
Script expectedScript = createScript();
- try (XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) {
+ try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values()))) {
expectedScript.toXContent(builder, ToXContent.EMPTY_PARAMS);
try (XContentParser xParser = createParser(builder)) {
Settings settings = Settings.fromXContent(xParser);
diff --git a/server/src/test/java/org/opensearch/script/StoredScriptTests.java b/server/src/test/java/org/opensearch/script/StoredScriptTests.java
index c16c4f93e6582..96c2b19bf204f 100644
--- a/server/src/test/java/org/opensearch/script/StoredScriptTests.java
+++
b/server/src/test/java/org/opensearch/script/StoredScriptTests.java @@ -37,8 +37,8 @@ import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.AbstractSerializingTestCase; @@ -73,7 +73,7 @@ public void testInvalidDelete() { public void testSourceParsing() throws Exception { // simple script value string - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().startObject("script").field("lang", "lang").field("source", "code").endObject().endObject(); StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); @@ -83,7 +83,7 @@ public void testSourceParsing() throws Exception { } // complex template using script as the field name - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject() .startObject("script") .field("lang", "mustache") @@ -94,7 +94,7 @@ public void testSourceParsing() throws Exception { .endObject(); String code; - try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { + try (XContentBuilder cb = MediaTypeRegistry.contentBuilder(builder.contentType())) { code = Strings.toString(cb.startObject().field("query", "code").endObject()); } @@ -109,7 +109,7 @@ public void testSourceParsing() throws Exception { } // complex script with script object - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "lang").field("source", "code").endObject().endObject(); StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); @@ -119,7 +119,7 @@ public void testSourceParsing() throws Exception { } // complex script using "code" backcompat - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "lang").field("code", "code").endObject().endObject(); StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); @@ -130,7 +130,7 @@ public void testSourceParsing() throws Exception { assertWarnings("Deprecated field [code] used, expected [source] instead"); // complex script with script object and empty options - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject() .field("script") .startObject() @@ -149,7 +149,7 @@ public void testSourceParsing() throws Exception { } // complex script with embedded template - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { 
Strings.toString( builder.startObject() .field("script") @@ -165,7 +165,7 @@ public void testSourceParsing() throws Exception { ); String code; - try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { + try (XContentBuilder cb = MediaTypeRegistry.contentBuilder(builder.contentType())) { code = Strings.toString(cb.startObject().field("query", "code").endObject()); } @@ -182,7 +182,7 @@ public void testSourceParsing() throws Exception { public void testSourceParsingErrors() throws Exception { // check for missing lang parameter when parsing a script - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("source", "code").endObject().endObject(); IllegalArgumentException iae = expectThrows( @@ -193,7 +193,7 @@ public void testSourceParsingErrors() throws Exception { } // check for missing source parameter when parsing a script - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "lang").endObject().endObject(); IllegalArgumentException iae = expectThrows( @@ -204,7 +204,7 @@ public void testSourceParsingErrors() throws Exception { } // check for illegal options parameter when parsing a script - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject() .field("script") .startObject() @@ -224,7 +224,7 @@ public void testSourceParsingErrors() throws Exception { } // check for unsupported template context - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().field("template", "code").endObject(); ParsingException pEx = expectThrows( ParsingException.class, @@ -238,7 +238,7 @@ public void testSourceParsingErrors() throws Exception { } public void testEmptyTemplateDeprecations() throws IOException { - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().endObject(); StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); @@ -248,7 +248,7 @@ public void testEmptyTemplateDeprecations() throws IOException { assertWarnings("empty templates should no longer be used"); } - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "mustache").field("source", "").endObject().endObject(); StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); diff --git a/server/src/test/java/org/opensearch/search/NestedIdentityTests.java b/server/src/test/java/org/opensearch/search/NestedIdentityTests.java index 341570247ffa9..17b1da2ba3a8c 100644 --- a/server/src/test/java/org/opensearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/opensearch/search/NestedIdentityTests.java @@ -35,9 +35,9 @@ import org.opensearch.common.Strings; import 
org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; @@ -66,7 +66,7 @@ public static NestedIdentity createTestItem(int depth) { public void testFromXContent() throws IOException { NestedIdentity nestedIdentity = createTestItem(randomInt(3)); XContentType xcontentType = randomFrom(XContentType.values()); - XContentBuilder builder = XContentFactory.contentBuilder(xcontentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(xcontentType); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregatorFactoriesTests.java index 1f29a17deeaa8..3a527ec7455d3 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/AggregatorFactoriesTests.java @@ -37,10 +37,10 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; @@ -239,7 +239,7 @@ public void testInvalidType() throws Exception { public void testRewriteAggregation() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); BytesReference bytesReference; - try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType)) { builder.startObject(); { builder.startObject("terms"); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/FiltersTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/FiltersTests.java index 3a8829d5a6433..136210a636224 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/FiltersTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/FiltersTests.java @@ -32,8 +32,8 @@ package org.opensearch.search.aggregations.bucket; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.BoolQueryBuilder; @@ -106,7 +106,7 @@ public void testFiltersSortedByKey() { } public void testOtherBucket() throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); 
builder.startArray("filters").startObject().startObject("term").field("field", "foo").endObject().endObject().endArray(); builder.endObject(); @@ -116,7 +116,7 @@ public void testOtherBucket() throws IOException { // The other bucket is disabled by default assertFalse(filters.otherBucket()); - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startArray("filters").startObject().startObject("term").field("field", "foo").endObject().endObject().endArray(); builder.field("other_bucket_key", "some_key"); @@ -128,7 +128,7 @@ public void testOtherBucket() throws IOException { // but setting a key enables it automatically assertTrue(filters.otherBucket()); - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startArray("filters").startObject().startObject("term").field("field", "foo").endObject().endObject().endArray(); builder.field("other_bucket", false); @@ -207,7 +207,7 @@ public void testRewritePreservesOtherBucket() throws IOException { public void testEmptyFilters() throws IOException { { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startArray("filters").endArray(); // unkeyed array builder.endObject(); @@ -221,7 +221,7 @@ public void testEmptyFilters() throws IOException { } { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); builder.startObject("filters").endObject(); // keyed object builder.endObject(); diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java index 1423fd6771674..d0b3d84bc78f9 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java @@ -37,9 +37,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongBitSet; import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.fielddata.AbstractSortedSetDocValues; @@ -218,7 +218,7 @@ public void testRegexExclude() throws IOException { // Serializes/deserializes an IncludeExclude statement with a single clause private IncludeExclude serialize(IncludeExclude incExc, ParseField field) throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } @@ -264,7 +264,7 @@ public void testRegexIncludeAndExclude() throws IOException { // Serializes/deserializes the IncludeExclude statement with include AND // exclude clauses 
private IncludeExclude serializeMixedRegex(IncludeExclude incExc) throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java index 461812077eba9..51ff64321acfa 100644 --- a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java @@ -39,9 +39,9 @@ import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -72,7 +72,7 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { public void testFromXContent() throws IOException { SearchSourceBuilder testSearchSourceBuilder = createSearchSourceBuilder(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } @@ -425,7 +425,7 @@ public void testToXContent() throws IOException { XContentType xContentType = randomFrom(XContentType.values()); { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - XContentBuilder builder = XContentFactory.contentBuilder(xContentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType); searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); BytesReference source = BytesReference.bytes(builder); Map sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); @@ -434,7 +434,7 @@ public void testToXContent() throws IOException { { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(RandomQueryBuilder.createQuery(random())); - XContentBuilder builder = XContentFactory.contentBuilder(xContentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType); searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); BytesReference source = BytesReference.bytes(builder); Map sourceAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); @@ -448,7 +448,7 @@ public void testToXContentWithPointInTime() throws IOException { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); TimeValue keepAlive = randomBoolean() ? 
TimeValue.timeValueHours(1) : null; searchSourceBuilder.pointInTimeBuilder(new PointInTimeBuilder("id").setKeepAlive(keepAlive)); - XContentBuilder builder = XContentFactory.contentBuilder(xContentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType); searchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); BytesReference bytes = BytesReference.bytes(builder); Map sourceAsMap = XContentHelper.convertToMap(bytes, false, xContentType).v2(); diff --git a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index b1dfd836f7461..6d31986cabdc4 100644 --- a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -41,10 +41,10 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.BigArrays; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -137,7 +137,7 @@ public void testEqualsAndHashcode() throws IOException { public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder highlightBuilder = randomHighlighterBuilder(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } @@ -547,7 +547,7 @@ public void testOrderSerialization() throws Exception { } protected static XContentBuilder toXContent(HighlightBuilder highlight, XContentType contentType) throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(contentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(contentType); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightFieldTests.java index a66c7de6ced74..3fb2718f5ccc3 100644 --- a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -36,9 +36,9 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.text.Text; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; @@ -82,7 +82,7 @@ public void testReplaceUnicodeControlCharacters() { public void testFromXContent() throws IOException { HighlightField 
highlightField = createTestItem(); XContentType xcontentType = randomFrom(XContentType.values()); - XContentBuilder builder = XContentFactory.contentBuilder(xcontentType); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(xcontentType); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/internal/ShardSearchRequestTests.java b/server/src/test/java/org/opensearch/search/internal/ShardSearchRequestTests.java index 1f632251cf588..4913ee92aa79b 100644 --- a/server/src/test/java/org/opensearch/search/internal/ShardSearchRequestTests.java +++ b/server/src/test/java/org/opensearch/search/internal/ShardSearchRequestTests.java @@ -44,6 +44,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -220,7 +221,7 @@ public QueryBuilder aliasFilter(IndexMetadata indexMetadata, String... aliasName return ShardSearchRequest.parseAliasFilter(bytes -> { try ( InputStream inputStream = bytes.streamInput(); - XContentParser parser = XContentFactory.xContentType(inputStream) + XContentParser parser = MediaTypeRegistry.xContentType(inputStream) .xContent() .createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, inputStream) ) { diff --git a/server/src/test/java/org/opensearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/opensearch/search/rescore/QueryRescorerBuilderTests.java index aa7bcf4d63e3d..199ab7d76ec99 100644 --- a/server/src/test/java/org/opensearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/rescore/QueryRescorerBuilderTests.java @@ -40,11 +40,11 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.BigArrays; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedObjectNotFoundException; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -130,7 +130,7 @@ private RescorerBuilder copy(RescorerBuilder original) throws IOException public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { RescorerBuilder rescoreBuilder = randomRescoreBuilder(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java index 47579e31dc7c2..811d3d2784595 100644 --- a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java @@ -42,6 +42,7 @@ import 
org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.text.Text; import org.opensearch.common.util.BigArrays; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; @@ -190,7 +191,7 @@ public void testEqualsAndHashcode() throws Exception { public void testFromXContent() throws Exception { for (int runs = 0; runs < 20; runs++) { SearchAfterBuilder searchAfterBuilder = randomJsonSearchFromBuilder(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } @@ -216,7 +217,7 @@ public void testFromXContentIllegalType() throws Exception { if (type == XContentType.JSON || type == XContentType.YAML) { continue; } - XContentBuilder xContent = XContentFactory.contentBuilder(type); + XContentBuilder xContent = MediaTypeRegistry.contentBuilder(type); xContent.startObject().startArray("search_after").value(new BigDecimal("9223372036854776003.3")).endArray().endObject(); try (XContentParser parser = createParser(xContent)) { parser.nextToken(); diff --git a/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java index 28d96c916bc41..fbbc667cbcb42 100644 --- a/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java @@ -56,9 +56,9 @@ import org.opensearch.common.Nullable; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.IndexSettings; @@ -208,7 +208,7 @@ public void testEqualsAndHashcode() throws Exception { public void testFromXContent() throws Exception { SliceBuilder sliceBuilder = randomSliceBuilder(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/opensearch/search/sort/AbstractSortTestCase.java index e8d713d15b337..02f32faa6ffd9 100644 --- a/server/src/test/java/org/opensearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/opensearch/search/sort/AbstractSortTestCase.java @@ -40,10 +40,10 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.BigArrays; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; 
import org.opensearch.env.Environment; @@ -131,7 +131,7 @@ public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { T testItem = createTestItem(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/sort/NestedSortBuilderTests.java b/server/src/test/java/org/opensearch/search/sort/NestedSortBuilderTests.java index bf37e822bfc3c..f8f2564dacbcb 100644 --- a/server/src/test/java/org/opensearch/search/sort/NestedSortBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/sort/NestedSortBuilderTests.java @@ -34,10 +34,10 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.ConstantScoreQueryBuilder; @@ -83,7 +83,7 @@ protected NamedXContentRegistry xContentRegistry() { public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { NestedSortBuilder testItem = createRandomNestedSort(3); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); try (XContentParser parser = createParser(shuffled)) { diff --git a/server/src/test/java/org/opensearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/opensearch/search/suggest/AbstractSuggestionBuilderTestCase.java index eee13ec56d841..1ac97fded27ce 100644 --- a/server/src/test/java/org/opensearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/opensearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -39,10 +39,10 @@ import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.BigArrays; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.index.Index; @@ -146,7 +146,7 @@ public void testEqualsAndHashcode() { public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB suggestionBuilder = randomTestBuilder(); - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { xContentBuilder.prettyPrint(); } diff --git 
a/server/src/test/java/org/opensearch/search/suggest/SuggestBuilderTests.java b/server/src/test/java/org/opensearch/search/suggest/SuggestBuilderTests.java index f8996c528cf32..f2d1acc35f356 100644 --- a/server/src/test/java/org/opensearch/search/suggest/SuggestBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/SuggestBuilderTests.java @@ -34,10 +34,10 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.SearchModule; @@ -82,7 +82,7 @@ public static void afterClass() { public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { SuggestBuilder suggestBuilder = randomSuggestBuilder(); - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { xContentBuilder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/opensearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 8484861999c3f..d0e42f8b91e2a 100644 --- a/server/src/test/java/org/opensearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -47,9 +47,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; @@ -140,7 +140,7 @@ private static DirectCandidateGeneratorBuilder mutate(DirectCandidateGeneratorBu public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { DirectCandidateGeneratorBuilder generator = randomCandidateGenerator(); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/search/suggest/phrase/SmoothingModelTestCase.java b/server/src/test/java/org/opensearch/search/suggest/phrase/SmoothingModelTestCase.java index a4adf03cff316..9b807ee35e4bf 100644 --- a/server/src/test/java/org/opensearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/server/src/test/java/org/opensearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -45,9 +45,9 @@ import org.apache.lucene.store.ByteBuffersDirectory; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.lucene.BytesRefs; +import org.opensearch.core.xcontent.MediaTypeRegistry; 
import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.SearchModule; @@ -101,7 +101,7 @@ public static void afterClass() throws Exception { */ public void testFromXContent() throws IOException { SmoothingModel testModel = createTestModel(); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { contentBuilder.prettyPrint(); } diff --git a/server/src/test/java/org/opensearch/tasks/TaskResultTests.java b/server/src/test/java/org/opensearch/tasks/TaskResultTests.java index f27a920d812e9..fac7a0538090a 100644 --- a/server/src/test/java/org/opensearch/tasks/TaskResultTests.java +++ b/server/src/test/java/org/opensearch/tasks/TaskResultTests.java @@ -36,9 +36,9 @@ import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; @@ -120,7 +120,7 @@ private XContentBuilder addRandomUnknownFields(XContentBuilder builder) throws I map.put("unknown_field" + i, Collections.singletonMap("inner", randomAlphaOfLength(20))); } } - XContentBuilder xContentBuilder = XContentFactory.contentBuilder(parser.contentType()); + XContentBuilder xContentBuilder = MediaTypeRegistry.contentBuilder(parser.contentType()); return xContentBuilder.map(map); } } diff --git a/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java b/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java index 3c20f22e392b8..8d951c603425d 100644 --- a/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java +++ b/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java @@ -35,8 +35,8 @@ import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import java.io.IOException; @@ -96,7 +96,7 @@ public static Settings randomIndexSettings() { * Creates a random mapping */ public static XContentBuilder randomMapping() throws IOException { - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(randomFrom(XContentType.values())); builder.startObject(); randomMappingFields(builder, true); diff --git a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java index 6a715599a8e1a..9e4e59d9a4d15 100644 --- 
a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java @@ -33,8 +33,8 @@ package org.opensearch.index.engine; import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.index.IndexSettings; import org.opensearch.index.VersionType; import org.opensearch.index.analysis.AnalysisRegistry; @@ -137,7 +137,13 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O final String indexName = mapperService.index().getName(); final Engine.Index engineIndex = IndexShard.prepareIndex( docMapper(MapperService.SINGLE_MAPPING_NAME), - new SourceToParse(indexName, index.id(), index.source(), XContentHelper.xContentType(index.source()), index.routing()), + new SourceToParse( + indexName, + index.id(), + index.source(), + MediaTypeRegistry.xContentType(index.source()), + index.routing() + ), index.seqNo(), index.primaryTerm(), index.version(), diff --git a/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java index e7b1dd2bde1ea..5480c4629ebf6 100644 --- a/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/opensearch/search/RandomSearchRequestGenerator.java @@ -353,7 +353,7 @@ public static SearchSourceBuilder randomSearchSourceBuilder( } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) + XContentParser parser = XContentType.JSON.xContent() .createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/BaseAggregationTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/BaseAggregationTestCase.java index 89d705c8fd1e8..b7b5fa091810d 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/BaseAggregationTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/BaseAggregationTestCase.java @@ -36,9 +36,9 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder; @@ -67,7 +67,7 @@ public abstract class BaseAggregationTestCase convertToMap(ToXContent part) throws IOExcepti } public static BytesReference convertToXContent(Map map, XContentType xContentType) throws IOException { - try (XContentBuilder builder = XContentFactory.contentBuilder(xContentType)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(xContentType)) { builder.map(map); return BytesReference.bytes(builder); } diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java 
b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 635dc49ff5166..3045b3fe66edc 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -43,8 +43,8 @@ import org.opensearch.client.NodeSelector; import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import java.io.IOException; @@ -185,7 +185,7 @@ private MediaType getContentType(Map headers, XContentType[] sup private BytesRef bodyAsBytesRef(Map bodyAsMap, MediaType mediaType) throws IOException { Map finalBodyAsMap = stash.replaceStashedValues(bodyAsMap); - try (XContentBuilder builder = XContentFactory.contentBuilder(mediaType)) { + try (XContentBuilder builder = MediaTypeRegistry.contentBuilder(mediaType)) { return BytesReference.bytes(builder.map(finalBodyAsMap)).toBytesRef(); } } diff --git a/test/framework/src/test/java/org/opensearch/test/rest/yaml/ObjectPathTests.java b/test/framework/src/test/java/org/opensearch/test/rest/yaml/ObjectPathTests.java index a6482ed09b253..ca07e39719e23 100644 --- a/test/framework/src/test/java/org/opensearch/test/rest/yaml/ObjectPathTests.java +++ b/test/framework/src/test/java/org/opensearch/test/rest/yaml/ObjectPathTests.java @@ -31,10 +31,9 @@ package org.opensearch.test.rest.yaml; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; @@ -56,7 +55,7 @@ public class ObjectPathTests extends OpenSearchTestCase { private static XContentBuilder randomXContentBuilder() throws IOException { XContentType xContentType = randomFrom(XContentType.values()); - return XContentBuilder.builder(XContentFactory.xContent(xContentType)); + return XContentBuilder.builder(xContentType.xContent()); } public void testEvaluateObjectPathEscape() throws Exception { @@ -342,7 +341,7 @@ public void testEvaluateArrayAsRoot() throws Exception { xContentBuilder.endObject(); xContentBuilder.endArray(); ObjectPath objectPath = ObjectPath.createFromXContent( - XContentFactory.xContent(xContentBuilder.contentType()), + xContentBuilder.contentType().xContent(), BytesReference.bytes(xContentBuilder) ); Object object = objectPath.evaluate(""); From 883559c8b985521dcf11a326f2ee3ae6f8bb4864 Mon Sep 17 00:00:00 2001 From: Poojita Raj Date: Fri, 28 Jul 2023 13:34:47 -0700 Subject: [PATCH 23/75] Unmute test SegmentReplicationIndexShardTests#testNRTReplicaWithRemoteStorePromotedAsPrimaryCommitCommit (#8931) Signed-off-by: Poojita Raj --- .../index/shard/SegmentReplicationIndexShardTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java index cc4fa6f28bafc..9107606326150 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java +++ 
b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java @@ -887,7 +887,6 @@ public void testNRTReplicaWithRemoteStorePromotedAsPrimaryCommitRefresh() throws testNRTReplicaWithRemoteStorePromotedAsPrimary(true, false); } - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8817") public void testNRTReplicaWithRemoteStorePromotedAsPrimaryCommitCommit() throws Exception { testNRTReplicaWithRemoteStorePromotedAsPrimary(true, true); } From a3aab67ee86bf171a7eb480f0933e0b955fbf4f3 Mon Sep 17 00:00:00 2001 From: Kunal Kotwani Date: Fri, 28 Jul 2023 17:06:42 -0700 Subject: [PATCH 24/75] Add restore level safeguards to prevent file cache oversubscription (#8606) Signed-off-by: Kunal Kotwani --- CHANGELOG.md | 1 + .../restore/RestoreSnapshotRequest.java | 2 +- .../cluster/routing/RoutingTable.java | 10 ++ .../decider/DiskThresholdDecider.java | 6 +- .../common/settings/ClusterSettings.java | 2 + .../store/remote/filecache/FileCache.java | 17 ++- .../main/java/org/opensearch/node/Node.java | 5 +- .../opensearch/snapshots/RestoreService.java | 83 ++++++++-- .../restore/RestoreSnapshotRequestTests.java | 4 + .../cluster/routing/RoutingTableTests.java | 43 ++++++ .../decider/DiskThresholdDeciderTests.java | 2 + .../snapshots/SnapshotResiliencyTests.java | 143 +++++++++++++++++- .../opensearch/test/OpenSearchTestCase.java | 2 + 13 files changed, 300 insertions(+), 20 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2f20b3b8d7b51..861674753861a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -79,6 +79,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add server version as REST response header [#6583](https://github.com/opensearch-project/OpenSearch/issues/6583) - Start replication checkpointTimers on primary before segments upload to remote store. ([#8221]()https://github.com/opensearch-project/OpenSearch/pull/8221) - [distribution/archives] [Linux] [x64] Provide the variant of the distributions bundled with JRE ([#8195]()https://github.com/opensearch-project/OpenSearch/pull/8195) +- Add configuration for file cache size to max remote data ratio to prevent oversubscription of file cache ([#8606](https://github.com/opensearch-project/OpenSearch/pull/8606)) ### Dependencies - Bump `org.apache.logging.log4j:log4j-core` from 2.17.1 to 2.20.0 ([#8307](https://github.com/opensearch-project/OpenSearch/pull/8307)) diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 840564a4bd7a2..7a142e70305ae 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -517,7 +517,7 @@ public String snapshotUuid() { /** * Sets the storage type for this request. 
*/ - RestoreSnapshotRequest storageType(StorageType storageType) { + public RestoreSnapshotRequest storageType(StorageType storageType) { this.storageType = storageType; return this; } diff --git a/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java index 7934649a6d3eb..d6a67bc714689 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/opensearch/cluster/routing/RoutingTable.java @@ -295,6 +295,16 @@ public ShardsIterator allShardsIncludingRelocationTargets(String[] indices) { return allShardsSatisfyingPredicate(indices, shardRouting -> true, true); } + /** + * All the shards on the node which match the predicate + * @param predicate condition to match + * @return iterator over shards matching the predicate + */ + public ShardsIterator allShardsSatisfyingPredicate(Predicate predicate) { + String[] indices = indicesRouting.keySet().toArray(new String[0]); + return allShardsSatisfyingPredicate(indices, predicate, false); + } + private ShardsIterator allShardsSatisfyingPredicate( String[] indices, Predicate predicate, diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 4b69c05807ae4..61b96184abcc4 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -68,7 +68,7 @@ import static org.opensearch.cluster.routing.RoutingPool.getShardPool; import static org.opensearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING; import static org.opensearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING; -import static org.opensearch.index.store.remote.filecache.FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO; +import static org.opensearch.index.store.remote.filecache.FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING; /** * The {@link DiskThresholdDecider} checks that the node a shard is potentially @@ -199,8 +199,8 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing final FileCacheStats fileCacheStats = clusterInfo.getNodeFileCacheStats().getOrDefault(node.nodeId(), null); final long nodeCacheSize = fileCacheStats != null ? 
fileCacheStats.getTotal().getBytes() : 0; final long totalNodeRemoteShardSize = currentNodeRemoteShardSize + shardSize; - - if (totalNodeRemoteShardSize > DATA_TO_FILE_CACHE_SIZE_RATIO * nodeCacheSize) { + final double dataToFileCacheSizeRatio = DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING.get(allocation.metadata().settings()); + if (dataToFileCacheSizeRatio > 0.0f && totalNodeRemoteShardSize > dataToFileCacheSizeRatio * nodeCacheSize) { return allocation.decision( Decision.NO, NAME, diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 46a43842451d9..9da9e1b14d307 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -44,6 +44,7 @@ import org.opensearch.index.ShardIndexingPressureMemoryManager; import org.opensearch.index.ShardIndexingPressureSettings; import org.opensearch.index.ShardIndexingPressureStore; +import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.search.backpressure.settings.NodeDuressSettings; import org.opensearch.search.backpressure.settings.SearchBackpressureSettings; import org.opensearch.search.backpressure.settings.SearchShardTaskSettings; @@ -643,6 +644,7 @@ public void apply(Settings value, Settings current, Settings previous) { // Settings related to Searchable Snapshots Node.NODE_SEARCH_CACHE_SIZE_SETTING, + FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING, // Settings related to Remote Refresh Segment Pressure RemoteRefreshSegmentPressureSettings.REMOTE_REFRESH_SEGMENT_PRESSURE_ENABLED, diff --git a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java index 3d23b4d22538c..47b891fdb8d21 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java +++ b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java @@ -11,6 +11,7 @@ import org.apache.lucene.store.IndexInput; import org.opensearch.common.breaker.CircuitBreaker; import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.common.settings.Setting; import org.opensearch.index.store.remote.utils.cache.CacheUsage; import org.opensearch.index.store.remote.utils.cache.RefCountedCache; import org.opensearch.index.store.remote.utils.cache.SegmentedCache; @@ -49,8 +50,20 @@ public class FileCache implements RefCountedCache { private final CircuitBreaker circuitBreaker; - // TODO: Convert the constant into an integer setting - public static final int DATA_TO_FILE_CACHE_SIZE_RATIO = 5; + /** + * Defines a limit of how much total remote data can be referenced as a ratio of the size of the disk reserved for + * the file cache. For example, if 100GB disk space is configured for use as a file cache and the + * remote_data_ratio of 5 is defined, then a total of 500GB of remote data can be loaded as searchable snapshots. + * This is designed to be a safeguard to prevent oversubscribing a cluster. + * Specify a value of zero for no limit, which is the default for compatibility reasons. 
+ */ + public static final Setting DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING = Setting.doubleSetting( + "cluster.filecache.remote_data_ratio", + 0.0, + 0.0, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); public FileCache(SegmentedCache cache, CircuitBreaker circuitBreaker) { this.theCache = cache; diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index fe630dfe27e3a..0bc824c5a0704 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -941,8 +941,9 @@ protected Node( clusterModule.getAllocationService(), metadataCreateIndexService, metadataIndexUpgradeService, - clusterService.getClusterSettings(), - shardLimitValidator + shardLimitValidator, + indicesService, + clusterInfoService::getClusterInfo ); final DiskThresholdMonitor diskThresholdMonitor = new DiskThresholdMonitor( diff --git a/server/src/main/java/org/opensearch/snapshots/RestoreService.java b/server/src/main/java/org/opensearch/snapshots/RestoreService.java index d7e89172c5837..119e632928cc7 100644 --- a/server/src/main/java/org/opensearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/opensearch/snapshots/RestoreService.java @@ -41,6 +41,7 @@ import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.cluster.ClusterChangedEvent; +import org.opensearch.cluster.ClusterInfo; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.ClusterStateApplier; import org.opensearch.cluster.ClusterStateTaskConfig; @@ -69,6 +70,7 @@ import org.opensearch.cluster.routing.RoutingChangesObserver; import org.opensearch.cluster.routing.RoutingTable; import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.cluster.routing.ShardsIterator; import org.opensearch.cluster.routing.UnassignedInfo; import org.opensearch.cluster.routing.allocation.AllocationService; import org.opensearch.cluster.service.ClusterManagerTaskKeys; @@ -87,6 +89,9 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.IndexShard; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.snapshots.IndexShardSnapshotStatus; +import org.opensearch.index.store.remote.filecache.FileCacheStats; +import org.opensearch.indices.IndicesService; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.repositories.IndexId; import org.opensearch.repositories.RepositoriesService; @@ -104,6 +109,7 @@ import java.util.Set; import java.util.function.Function; import java.util.function.Predicate; +import java.util.function.Supplier; import java.util.stream.Collectors; import static java.util.Collections.unmodifiableSet; @@ -119,6 +125,8 @@ import static org.opensearch.common.util.FeatureFlags.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY; import static org.opensearch.common.util.set.Sets.newHashSet; import static org.opensearch.index.store.remote.directory.RemoteSnapshotDirectory.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY_MINIMUM_VERSION; +import static org.opensearch.index.store.remote.filecache.FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING; +import static org.opensearch.node.Node.NODE_SEARCH_CACHE_SIZE_SETTING; import static org.opensearch.snapshots.SnapshotUtils.filterIndices; /** @@ -177,6 +185,10 @@ public class RestoreService implements ClusterStateApplier { private final ClusterSettings clusterSettings; + private final IndicesService 
indicesService; + + private final Supplier clusterInfoSupplier; + private final ClusterManagerTaskThrottler.ThrottlingKey restoreSnapshotTaskKey; private static final CleanRestoreStateTaskExecutor cleanRestoreStateTaskExecutor = new CleanRestoreStateTaskExecutor(); @@ -187,8 +199,9 @@ public RestoreService( AllocationService allocationService, MetadataCreateIndexService createIndexService, MetadataIndexUpgradeService metadataIndexUpgradeService, - ClusterSettings clusterSettings, - ShardLimitValidator shardLimitValidator + ShardLimitValidator shardLimitValidator, + IndicesService indicesService, + Supplier clusterInfoSupplier ) { this.clusterService = clusterService; this.repositoriesService = repositoriesService; @@ -200,6 +213,8 @@ public RestoreService( } this.clusterSettings = clusterService.getClusterSettings(); this.shardLimitValidator = shardLimitValidator; + this.indicesService = indicesService; + this.clusterInfoSupplier = clusterInfoSupplier; // Task is onboarded for throttling, it will get retried from associated TransportClusterManagerNodeAction. restoreSnapshotTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.RESTORE_SNAPSHOT_KEY, true); @@ -415,7 +430,6 @@ public ClusterManagerTaskThrottler.ThrottlingKey getClusterManagerThrottlingKey( @Override public ClusterState execute(ClusterState currentState) { - RestoreInProgress restoreInProgress = currentState.custom(RestoreInProgress.TYPE, RestoreInProgress.EMPTY); // Check if the snapshot to restore is currently being deleted SnapshotDeletionsInProgress deletionsInProgress = currentState.custom( SnapshotDeletionsInProgress.TYPE, @@ -436,7 +450,9 @@ public ClusterState execute(ClusterState currentState) { ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); RoutingTable.Builder rtBuilder = RoutingTable.builder(currentState.routingTable()); final Map shards; + final boolean isRemoteSnapshot = IndexModule.Type.REMOTE_SNAPSHOT.match(request.storageType().toString()); Set aliases = new HashSet<>(); + long totalRestorableRemoteIndexesSize = 0; if (indices.isEmpty() == false) { // We have some indices to restore @@ -447,17 +463,14 @@ public ClusterState execute(ClusterState currentState) { String index = indexEntry.getValue(); boolean partial = checkPartial(index); + IndexId snapshotIndexId = repositoryData.resolveIndexId(index); IndexMetadata snapshotIndexMetadata = updateIndexSettings( metadata.index(index), request.indexSettings(), request.ignoreIndexSettings() ); - if (IndexModule.Type.REMOTE_SNAPSHOT.match(request.storageType().toString())) { - snapshotIndexMetadata = addSnapshotToIndexSettings( - snapshotIndexMetadata, - snapshot, - repositoryData.resolveIndexId(index) - ); + if (isRemoteSnapshot) { + snapshotIndexMetadata = addSnapshotToIndexSettings(snapshotIndexMetadata, snapshot, snapshotIndexId); } final boolean isSearchableSnapshot = IndexModule.Type.REMOTE_SNAPSHOT.match( snapshotIndexMetadata.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey()) @@ -483,7 +496,7 @@ public ClusterState execute(ClusterState currentState) { restoreUUID, snapshot, snapshotInfo.version(), - repositoryData.resolveIndexId(index), + snapshotIndexId, isSearchableSnapshot, isRemoteStoreShallowCopy, request.getSourceRemoteStoreRepository() @@ -602,6 +615,14 @@ public ClusterState execute(ClusterState currentState) { } for (int shard = 0; shard < snapshotIndexMetadata.getNumberOfShards(); shard++) { + if (isRemoteSnapshot) { + IndexShardSnapshotStatus.Copy shardStatus = 
repository.getShardSnapshotStatus( + snapshotInfo.snapshotId(), + snapshotIndexId, + new ShardId(metadata.index(index).getIndex(), shard) + ).asCopy(); + totalRestorableRemoteIndexesSize += shardStatus.getTotalSize(); + } if (!ignoreShards.contains(shard)) { shardsBuilder.put( new ShardId(renamedIndex, shard), @@ -638,6 +659,9 @@ public ClusterState execute(ClusterState currentState) { } checkAliasNameConflicts(indices, aliases); + if (isRemoteSnapshot) { + validateSearchableSnapshotRestorable(totalRestorableRemoteIndexesSize); + } Map updatedDataStreams = new HashMap<>(currentState.metadata().dataStreams()); updatedDataStreams.putAll( @@ -837,6 +861,45 @@ private IndexMetadata updateIndexSettings( return builder.settings(settingsBuilder).build(); } + private void validateSearchableSnapshotRestorable(long totalRestorableRemoteIndexesSize) { + ClusterInfo clusterInfo = clusterInfoSupplier.get(); + double remoteDataToFileCacheRatio = DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING.get(clusterService.getSettings()); + Map nodeFileCacheStats = clusterInfo.getNodeFileCacheStats(); + if (nodeFileCacheStats.isEmpty() || remoteDataToFileCacheRatio <= 0.01f) { + return; + } + + long totalNodeFileCacheSize = clusterInfo.getNodeFileCacheStats() + .values() + .stream() + .map(fileCacheStats -> fileCacheStats.getTotal().getBytes()) + .mapToLong(Long::longValue) + .sum(); + + Predicate isRemoteSnapshotShard = shardRouting -> shardRouting.primary() + && indicesService.indexService(shardRouting.index()).getIndexSettings().isRemoteSnapshot(); + + ShardsIterator shardsIterator = clusterService.state() + .routingTable() + .allShardsSatisfyingPredicate(isRemoteSnapshotShard); + + long totalRestoredRemoteIndexesSize = shardsIterator.getShardRoutings() + .stream() + .map(clusterInfo::getShardSize) + .mapToLong(Long::longValue) + .sum(); + + if (totalRestoredRemoteIndexesSize + totalRestorableRemoteIndexesSize > remoteDataToFileCacheRatio + * totalNodeFileCacheSize) { + throw new SnapshotRestoreException( + snapshot, + "Size of the indexes to be restored exceeds the file cache bounds. Increase the file cache capacity on the cluster nodes using " + + NODE_SEARCH_CACHE_SIZE_SETTING.getKey() + + " setting." + ); + } + } + @Override public void onFailure(String source, Exception e) { logger.warn(() -> new ParameterizedMessage("[{}] failed to restore snapshot", snapshotId), e); diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java index 5c20b3b262730..82b2cfb2e3e51 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java @@ -112,6 +112,10 @@ private RestoreSnapshotRequest randomState(RestoreSnapshotRequest instance) { instance.snapshotUuid(randomBoolean() ? null : randomAlphaOfLength(10)); } + instance.storageType( + randomBoolean() ? 
RestoreSnapshotRequest.StorageType.LOCAL : RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT + ); + if (randomBoolean()) { instance.setSourceRemoteStoreRepository(randomAlphaOfLengthBetween(5, 10)); } diff --git a/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java b/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java index 0ff9d6f07751a..53f1d71947f7c 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/RoutingTableTests.java @@ -231,6 +231,49 @@ public void testShardsMatchingPredicateCount() { assertThat(clusterState.routingTable().shardsMatchingPredicateCount(predicate), is(2)); } + public void testAllShardsMatchingPredicate() { + MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); + Metadata metadata = Metadata.builder() + .put(IndexMetadata.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetadata.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) + .build(); + ClusterState clusterState = ClusterState.builder(org.opensearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metadata(metadata) + .routingTable(RoutingTable.builder().addAsNew(metadata.index("test1")).addAsNew(metadata.index("test2")).build()) + .build(); + clusterState = ClusterState.builder(clusterState) + .nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2"))) + .build(); + clusterState = allocation.reroute(clusterState, "reroute"); + + Predicate predicate = s -> s.state() == ShardRoutingState.UNASSIGNED && s.unassignedInfo().isDelayed(); + assertThat(clusterState.routingTable().allShardsSatisfyingPredicate(predicate).size(), is(0)); + + // starting primaries + clusterState = startInitializingShardsAndReroute(allocation, clusterState); + // starting replicas + clusterState = startInitializingShardsAndReroute(allocation, clusterState); + // remove node2 and reroute + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build(); + // make sure both replicas are marked as delayed (i.e. 
not reallocated) + clusterState = allocation.disassociateDeadNodes(clusterState, true, "reroute"); + assertThat(clusterState.routingTable().allShardsSatisfyingPredicate(predicate).size(), is(2)); + + // Verifies true against all shards on the node (active/inactive) + assertThat(clusterState.routingTable().allShardsSatisfyingPredicate(shard -> true).size(), is(4)); + // Verifies false against all shards on the node (active/inactive) + assertThat(clusterState.routingTable().allShardsSatisfyingPredicate(shard -> false).size(), is(0)); + // Verifies against all primary shards on the node + assertThat(clusterState.routingTable().allShardsSatisfyingPredicate(ShardRouting::primary).size(), is(2)); + // Verifies a predicate which tests for inactive replicas + assertThat( + clusterState.routingTable() + .allShardsSatisfyingPredicate(shardRouting -> !shardRouting.primary() && !shardRouting.active()) + .size(), + is(2) + ); + } + public void testActivePrimaryShardsGrouped() { assertThat(this.emptyRoutingTable.activePrimaryShardsGrouped(new String[0], true).size(), is(0)); assertThat(this.emptyRoutingTable.activePrimaryShardsGrouped(new String[0], false).size(), is(0)); diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 2180a14f5bf30..bde8a45359814 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -69,6 +69,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.core.index.Index; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.index.store.remote.filecache.FileCacheStats; import org.opensearch.repositories.IndexId; import org.opensearch.snapshots.EmptySnapshotsInfoService; @@ -405,6 +406,7 @@ public void testFileCacheRemoteShardsDecisions() { DiskThresholdDecider diskThresholdDecider = makeDecider(diskSettings); Metadata metadata = Metadata.builder() .put(IndexMetadata.builder("test").settings(remoteIndexSettings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) + .persistentSettings(Settings.builder().put(FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING.getKey(), 5).build()) .build(); RoutingTable initialRoutingTable = RoutingTable.builder().addAsNew(metadata.index("test")).build(); diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index a121a190096b4..e4dec5163c400 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -34,6 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.mockito.Mockito; import org.opensearch.ExceptionsHelper; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -105,6 +106,8 @@ import org.opensearch.client.AdminClient; import org.opensearch.client.node.NodeClient; import org.opensearch.cluster.ClusterChangedEvent; +import org.opensearch.cluster.ClusterInfo; +import org.opensearch.cluster.ClusterInfoService; import org.opensearch.cluster.ClusterModule; import org.opensearch.cluster.ClusterName; import 
org.opensearch.cluster.ClusterState; @@ -176,7 +179,9 @@ import org.opensearch.index.seqno.RetentionLeaseSyncer; import org.opensearch.index.shard.PrimaryReplicaSyncer; import org.opensearch.index.store.RemoteSegmentStoreDirectoryFactory; +import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.index.store.remote.filecache.FileCacheCleaner; +import org.opensearch.index.store.remote.filecache.FileCacheStats; import org.opensearch.indices.IndicesModule; import org.opensearch.indices.IndicesService; import org.opensearch.indices.ShardLimitValidator; @@ -213,6 +218,7 @@ import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.disruption.DisruptableMockTransport; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.RemoteTransportException; import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportInterceptor; import org.opensearch.transport.TransportRequest; @@ -245,6 +251,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.mockito.Mockito.when; import static org.opensearch.action.support.ActionTestUtils.assertNoFailureListener; import static org.opensearch.env.Environment.PATH_HOME_SETTING; import static org.opensearch.monitor.StatusInfo.Status.HEALTHY; @@ -260,6 +267,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; +import static org.opensearch.node.Node.NODE_SEARCH_CACHE_SIZE_SETTING; public class SnapshotResiliencyTests extends OpenSearchTestCase { @@ -413,6 +421,106 @@ public void testSuccessfulSnapshotAndRestore() { assertEquals(0, snapshotInfo.failedShards()); } + public void testSearchableSnapshotOverSubscription() { + setupTestCluster(1, 2, 2); + + String repoName = "repo"; + String snapshotName = "snapshot"; + final String index = "test"; + final int shards = randomIntBetween(1, 10); + final int documents = randomIntBetween(0, 100); + + final TestClusterNodes.TestClusterNode clusterManagerNode = testClusterNodes.currentClusterManager( + testClusterNodes.nodes.values().iterator().next().clusterService.state() + ); + + Map nodeFileCacheStats = new HashMap<>(); + for (TestClusterNodes.TestClusterNode node : testClusterNodes.nodes.values()) { + nodeFileCacheStats.put(node.node.getId(), new FileCacheStats(0, 1, 0, 0, 0, 0, 0)); + } + ClusterInfo clusterInfo = new ClusterInfo(Map.of(), Map.of(), Map.of(), Map.of(), Map.of(), nodeFileCacheStats); + testClusterNodes.nodes.values().forEach(node -> when(node.getMockClusterInfoService().getClusterInfo()).thenReturn(clusterInfo)); + + final StepListener createSnapshotResponseListener = new StepListener<>(); + + continueOrDie(createRepoAndIndex(repoName, index, shards), createIndexResponse -> { + final Runnable afterIndexing = () -> client().admin() + .cluster() + .prepareCreateSnapshot(repoName, snapshotName) + .setWaitForCompletion(true) + .execute(createSnapshotResponseListener); + if (documents == 0) { + afterIndexing.run(); + } else { + final BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int i = 0; i < documents; ++i) { + bulkRequest.add(new IndexRequest(index).source(Collections.singletonMap("foo", "bar" + i))); + } + final StepListener bulkResponseStepListener = new StepListener<>(); + client().bulk(bulkRequest, bulkResponseStepListener); + continueOrDie(bulkResponseStepListener, bulkResponse -> { + 
assertFalse("Failures in bulk response: " + bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); + assertEquals(documents, bulkResponse.getItems().length); + afterIndexing.run(); + }); + } + }); + + final StepListener deleteIndexListener = new StepListener<>(); + + continueOrDie( + createSnapshotResponseListener, + createSnapshotResponse -> client().admin().indices().delete(new DeleteIndexRequest(index), deleteIndexListener) + ); + + final StepListener restoreSnapshotResponseListener = new StepListener<>(); + continueOrDie( + deleteIndexListener, + ignored -> client().admin() + .cluster() + .restoreSnapshot( + new RestoreSnapshotRequest(repoName, snapshotName).waitForCompletion(true) + .storageType(RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT), + restoreSnapshotResponseListener + ) + ); + + final AtomicBoolean exceptionVerified = new AtomicBoolean(); + + restoreSnapshotResponseListener.whenComplete(null, restoreSnapshotException -> { + Throwable throwable = restoreSnapshotException; + if (restoreSnapshotException instanceof RemoteTransportException) { + throwable = restoreSnapshotException.getCause(); + } + try { + assertTrue(throwable instanceof SnapshotRestoreException); + assertTrue( + throwable.getMessage() + .contains( + "Size of the indexes to be restored exceeds the file cache bounds. Increase the file cache capacity on the cluster nodes using " + + NODE_SEARCH_CACHE_SIZE_SETTING.getKey() + + " setting." + ) + ); + } catch (SnapshotRestoreException ignored) {} + exceptionVerified.set(true); + }); + + runUntil(exceptionVerified::get, TimeUnit.MINUTES.toMillis(5L)); + assertTrue(exceptionVerified.get()); + SnapshotsInProgress finalSnapshotsInProgress = clusterManagerNode.clusterService.state().custom(SnapshotsInProgress.TYPE); + assertFalse(finalSnapshotsInProgress.entries().stream().anyMatch(entry -> entry.state().completed() == false)); + final Repository repository = clusterManagerNode.repositoriesService.repository(repoName); + Collection snapshotIds = getRepositoryData(repository).getSnapshotIds(); + assertThat(snapshotIds, hasSize(1)); + + final SnapshotInfo snapshotInfo = repository.getSnapshotInfo(snapshotIds.iterator().next()); + assertEquals(SnapshotState.SUCCESS, snapshotInfo.state()); + assertThat(snapshotInfo.indices(), containsInAnyOrder(index)); + assertEquals(shards, snapshotInfo.successfulShards()); + assertEquals(0, snapshotInfo.failedShards()); + } + public void testSnapshotWithNodeDisconnects() { final int dataNodes = randomIntBetween(2, 10); final int clusterManagerNodes = randomFrom(1, 3, 5); @@ -1415,6 +1523,11 @@ private void setupTestCluster(int clusterManagerNodes, int dataNodes) { startCluster(); } + private void setupTestCluster(int clusterManagerNodes, int dataNodes, int searchNodes) { + testClusterNodes = new TestClusterNodes(clusterManagerNodes, dataNodes, searchNodes); + startCluster(); + } + private void scheduleSoon(Runnable runnable) { deterministicTaskQueue.scheduleAt(deterministicTaskQueue.getCurrentTimeMillis() + randomLongBetween(0, 100L), runnable); } @@ -1465,6 +1578,7 @@ private Environment createEnvironment(String nodeName) { ClusterBootstrapService.INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey(), ClusterBootstrapService.INITIAL_CLUSTER_MANAGER_NODES_SETTING.get(Settings.EMPTY) ) + .put(FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING.getKey(), 5) .put(MappingUpdatedAction.INDICES_MAX_IN_FLIGHT_UPDATES_SETTING.getKey(), 1000) // o.w. 
some tests might block .build() ); @@ -1488,6 +1602,10 @@ private final class TestClusterNodes { private final Set disconnectedNodes = new HashSet<>(); TestClusterNodes(int clusterManagerNodes, int dataNodes) { + this(clusterManagerNodes, dataNodes, 0); + } + + TestClusterNodes(int clusterManagerNodes, int dataNodes, int searchNodes) { for (int i = 0; i < clusterManagerNodes; ++i) { nodes.computeIfAbsent("node" + i, nodeName -> { try { @@ -1506,6 +1624,15 @@ private final class TestClusterNodes { } }); } + for (int i = 0; i < searchNodes; ++i) { + nodes.computeIfAbsent("search-node" + i, nodeName -> { + try { + return newSearchNode(nodeName); + } catch (IOException e) { + throw new AssertionError(e); + } + }); + } } public TestClusterNode nodeById(final String nodeId) { @@ -1524,6 +1651,10 @@ private TestClusterNode newDataNode(String nodeName) throws IOException { return newNode(nodeName, DiscoveryNodeRole.DATA_ROLE); } + private TestClusterNode newSearchNode(String nodeName) throws IOException { + return newNode(nodeName, DiscoveryNodeRole.SEARCH_ROLE); + } + private TestClusterNode newNode(String nodeName, DiscoveryNodeRole role) throws IOException { return new TestClusterNode( new DiscoveryNode( @@ -1667,6 +1798,8 @@ private final class TestClusterNode { private final ThreadPool threadPool; + private final ClusterInfoService clusterInfoService; + private Coordinator coordinator; TestClusterNode(DiscoveryNode node) throws IOException { @@ -1784,6 +1917,7 @@ public void onFailure(final Exception e) { final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(Collections.emptyList()); final ScriptService scriptService = new ScriptService(settings, emptyMap(), emptyMap()); client = new NodeClient(settings, threadPool); + clusterInfoService = Mockito.mock(ClusterInfoService.class); final SetOnce rerouteServiceSetOnce = new SetOnce<>(); final SnapshotsInfoService snapshotsInfoService = new InternalSnapshotsInfoService( settings, @@ -1993,8 +2127,9 @@ public void onFailure(final Exception e) { new SystemIndices(emptyMap()), null ), - clusterSettings, - shardLimitValidator + shardLimitValidator, + indicesService, + clusterInfoService::getClusterInfo ); actions.put( PutMappingAction.INSTANCE, @@ -2207,6 +2342,10 @@ protected void assertSnapshotOrGenericThread() { } } + public ClusterInfoService getMockClusterInfoService() { + return clusterInfoService; + } + public void restart() { testClusterNodes.disconnectNode(this); final ClusterState oldState = this.clusterService.state(); diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java index cdaa42c592c06..14275d838e6a9 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java @@ -117,6 +117,7 @@ import org.opensearch.index.analysis.NamedAnalyzer; import org.opensearch.index.analysis.TokenFilterFactory; import org.opensearch.index.analysis.TokenizerFactory; +import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.indices.analysis.AnalysisModule; import org.opensearch.monitor.jvm.JvmInfo; import org.opensearch.plugins.AnalysisPlugin; @@ -1201,6 +1202,7 @@ public static Settings.Builder settings(Version version) { public static Settings.Builder remoteIndexSettings(Version version) { Settings.Builder builder = Settings.builder() + .put(FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING.getKey(), 5) 
.put(IndexMetadata.SETTING_VERSION_CREATED, version) .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.REMOTE_SNAPSHOT.getSettingsKey()); return builder; From 0750907e6276e36debf84b8fdd1328698c6a4cbe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Jul 2023 11:08:20 -0400 Subject: [PATCH 25/75] Bump com.gradle.enterprise from 3.13.3 to 3.14.1 (#8996) * Bump com.gradle.enterprise from 3.13.3 to 3.14.1 Bumps com.gradle.enterprise from 3.13.3 to 3.14.1. --- updated-dependencies: - dependency-name: com.gradle.enterprise dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 3 ++- settings.gradle | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 861674753861a..a96c6de1fca26 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -92,6 +92,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.google.http-client:google-http-client-gson` from 1.43.2 to 1.43.3 ([#8840](https://github.com/opensearch-project/OpenSearch/pull/8840)) - OpenJDK Update (July 2023 Patch releases) ([#8868](https://github.com/opensearch-project/OpenSearch/pull/8868) - Bump `hadoop` libraries from 3.3.4 to 3.3.6 ([#6995](https://github.com/opensearch-project/OpenSearch/pull/6995)) +- Bump `com.gradle.enterprise` from 3.13.3 to 3.14.1 ([#8996](https://github.com/opensearch-project/OpenSearch/pull/8996)) ### Changed - Perform aggregation postCollection in ContextIndexSearcher after searching leaves ([#8303](https://github.com/opensearch-project/OpenSearch/pull/8303)) @@ -110,4 +111,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Security [Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.x...HEAD -[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x +[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x \ No newline at end of file diff --git a/settings.gradle b/settings.gradle index 94f5de26f4090..c04b5997d49b1 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,7 +10,7 @@ */ plugins { - id "com.gradle.enterprise" version "3.13.3" + id "com.gradle.enterprise" version "3.14.1" } buildCache { From ddb967aec79660e94bac519e3fc19ec837c95083 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Jul 2023 12:50:00 -0400 Subject: [PATCH 26/75] Bump org.apache.commons:commons-lang3 from 3.12.0 to 3.13.0 in /plugins/ingest-attachment (#8995) * Bump org.apache.commons:commons-lang3 in /plugins/ingest-attachment Bumps org.apache.commons:commons-lang3 from 3.12.0 to 3.13.0. --- updated-dependencies: - dependency-name: org.apache.commons:commons-lang3 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + plugins/ingest-attachment/build.gradle | 2 +- .../ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1 | 1 - .../ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index a96c6de1fca26..01d51b39279c9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -93,6 +93,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - OpenJDK Update (July 2023 Patch releases) ([#8868](https://github.com/opensearch-project/OpenSearch/pull/8868) - Bump `hadoop` libraries from 3.3.4 to 3.3.6 ([#6995](https://github.com/opensearch-project/OpenSearch/pull/6995)) - Bump `com.gradle.enterprise` from 3.13.3 to 3.14.1 ([#8996](https://github.com/opensearch-project/OpenSearch/pull/8996)) +- Bump `org.apache.commons:commons-lang3` from 3.12.0 to 3.13.0 ([#8995](https://github.com/opensearch-project/OpenSearch/pull/8995)) ### Changed - Perform aggregation postCollection in ContextIndexSearcher after searching leaves ([#8303](https://github.com/opensearch-project/OpenSearch/pull/8303)) diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 62651216c8144..17213b96a25f3 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -89,7 +89,7 @@ dependencies { api "org.apache.james:apache-mime4j-core:${versions.mime4j}" api "org.apache.james:apache-mime4j-dom:${versions.mime4j}" // EPUB books - api 'org.apache.commons:commons-lang3:3.12.0' + api 'org.apache.commons:commons-lang3:3.13.0' // Microsoft Word files with visio diagrams api 'org.apache.commons:commons-math3:3.6.1' // POIs dependency diff --git a/plugins/ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1 deleted file mode 100644 index 9273d8c01aaba..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-lang3-3.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c6842c86792ff03b9f1d1fe2aab8dc23aa6c6f0e \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1 new file mode 100644 index 0000000000000..d0c2f2486ee1f --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1 @@ -0,0 +1 @@ +b7263237aa89c1f99b327197c41d0669707a462e \ No newline at end of file From 8b751f8141ba87b4435db78cd8df7631e48b17b7 Mon Sep 17 00:00:00 2001 From: Andrew Ross Date: Mon, 31 Jul 2023 09:56:40 -0700 Subject: [PATCH 27/75] Remove MasterServiceTests from retries (#8984) After the fix in #8901, I ran the following in a loop all day (about 1400 times) and saw no failures: ``` ./gradlew ':server:test' --tests "org.opensearch.cluster.service.MasterServiceTests" ``` Signed-off-by: Andrew Ross --- build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/build.gradle b/build.gradle index 12499bbf6a817..4d85638270eca 100644 --- a/build.gradle +++ b/build.gradle @@ -487,7 +487,6 @@ subprojects { 
includeClasses.add("org.opensearch.cluster.metadata.IndexGraveyardTests") includeClasses.add("org.opensearch.cluster.routing.MovePrimaryFirstTests") includeClasses.add("org.opensearch.cluster.routing.allocation.decider.DiskThresholdDeciderIT") - includeClasses.add("org.opensearch.cluster.service.MasterServiceTests") includeClasses.add("org.opensearch.common.util.concurrent.QueueResizableOpenSearchThreadPoolExecutorTests") includeClasses.add("org.opensearch.gateway.RecoveryFromGatewayIT") includeClasses.add("org.opensearch.gateway.ReplicaShardAllocatorIT") From 35662f06bbef75447ba6204bc6a7e032066509cc Mon Sep 17 00:00:00 2001 From: Ticheng Lin <51488860+ticheng-aws@users.noreply.github.com> Date: Mon, 31 Jul 2023 10:06:03 -0700 Subject: [PATCH 28/75] Add support for aggregation profiler with concurrent aggregation (#8801) * Add support for aggregation profiler with concurrent aggregation (#8801) Signed-off-by: Ticheng Lin * Address review comments for support for aggregation profiler with concurrent aggregation (#8801) Signed-off-by: Ticheng Lin * Refactor ProfileResult class and add more tests Signed-off-by: Ticheng Lin * Fix flaky QueryProfilePhaseTests.testCollapseQuerySearchResults test Signed-off-by: Ticheng Lin --------- Signed-off-by: Ticheng Lin --- CHANGELOG.md | 1 + .../aggregation/AggregationProfilerIT.java | 187 ++++++++++++++-- .../org/opensearch/search/SearchService.java | 2 +- .../profile/AbstractProfileBreakdown.java | 21 +- .../search/profile/ProfileResult.java | 94 ++++++++- .../opensearch/search/profile/Profilers.java | 9 +- .../org/opensearch/search/profile/Timer.java | 13 +- .../AggregationProfileBreakdown.java | 15 ++ .../aggregation/AggregationProfiler.java | 24 +-- .../ConcurrentAggregationProfiler.java | 199 ++++++++++++++++++ .../InternalAggregationProfileTree.java | 4 + .../ConcurrentQueryProfileBreakdown.java | 4 +- .../search/profile/ProfileResultTests.java | 100 ++++++++- .../opensearch/search/profile/TimerTests.java | 6 +- .../AggregationProfileShardResultTests.java | 2 +- .../ConcurrentAggregationProfilerTests.java | 182 ++++++++++++++++ .../query/QueryProfileShardResultTests.java | 2 +- .../opensearch/test/TestSearchContext.java | 3 +- 18 files changed, 800 insertions(+), 68 deletions(-) create mode 100644 server/src/main/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfiler.java create mode 100644 server/src/test/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfilerTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 01d51b39279c9..7aa7d14a936a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -102,6 +102,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Exclude 'benchmarks' from codecov report ([#8805](https://github.com/opensearch-project/OpenSearch/pull/8805)) - [Refactor] MediaTypeParser to MediaTypeParserRegistry ([#8636](https://github.com/opensearch-project/OpenSearch/pull/8636)) - Create separate SourceLookup instance per segment slice in SignificantTextAggregatorFactory ([#8807](https://github.com/opensearch-project/OpenSearch/pull/8807)) +- Add support for aggregation profiler with concurrent aggregation ([#8801](https://github.com/opensearch-project/OpenSearch/pull/8801)) ### Deprecated diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java index 0f08c537d74d8..9d0c30c5a488f 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java @@ -90,7 +90,52 @@ public class AggregationProfilerIT extends OpenSearchIntegTestCase { COLLECT + "_count", POST_COLLECTION + "_count", BUILD_AGGREGATION + "_count", - REDUCE + "_count" + REDUCE + "_count", + INITIALIZE + "_start_time", + BUILD_LEAF_COLLECTOR + "_start_time", + COLLECT + "_start_time", + POST_COLLECTION + "_start_time", + BUILD_AGGREGATION + "_start_time", + REDUCE + "_start_time" + ); + + private static final Set CONCURRENT_SEARCH_BREAKDOWN_KEYS = Set.of( + INITIALIZE, + BUILD_LEAF_COLLECTOR, + COLLECT, + POST_COLLECTION, + BUILD_AGGREGATION, + REDUCE, + INITIALIZE + "_count", + BUILD_LEAF_COLLECTOR + "_count", + COLLECT + "_count", + POST_COLLECTION + "_count", + BUILD_AGGREGATION + "_count", + REDUCE + "_count", + "max_" + INITIALIZE, + "max_" + BUILD_LEAF_COLLECTOR, + "max_" + COLLECT, + "max_" + POST_COLLECTION, + "max_" + BUILD_AGGREGATION, + "max_" + REDUCE, + "min_" + INITIALIZE, + "min_" + BUILD_LEAF_COLLECTOR, + "min_" + COLLECT, + "min_" + POST_COLLECTION, + "min_" + BUILD_AGGREGATION, + "min_" + REDUCE, + "avg_" + INITIALIZE, + "avg_" + BUILD_LEAF_COLLECTOR, + "avg_" + COLLECT, + "avg_" + POST_COLLECTION, + "avg_" + BUILD_AGGREGATION, + "avg_" + REDUCE, + "max_" + BUILD_LEAF_COLLECTOR + "_count", + "max_" + COLLECT + "_count", + "min_" + BUILD_LEAF_COLLECTOR + "_count", + "min_" + COLLECT + "_count", + "avg_" + BUILD_LEAF_COLLECTOR + "_count", + "avg_" + COLLECT + "_count" ); private static final String TOTAL_BUCKETS = "total_buckets"; @@ -169,7 +214,12 @@ public void testSimpleProfile() { assertThat(histoAggResult.getTime(), greaterThan(0L)); Map breakdown = histoAggResult.getTimeBreakdown(); assertThat(breakdown, notNullValue()); - assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (histoAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(breakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(breakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(breakdown.get(INITIALIZE), greaterThan(0L)); assertThat(breakdown.get(COLLECT), greaterThan(0L)); assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L)); @@ -212,7 +262,12 @@ public void testMultiLevelProfile() { assertThat(histoAggResult.getTime(), greaterThan(0L)); Map histoBreakdown = histoAggResult.getTimeBreakdown(); assertThat(histoBreakdown, notNullValue()); - assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (histoAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(histoBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); @@ -230,7 +285,12 @@ public void testMultiLevelProfile() { assertThat(termsAggResult.getTime(), greaterThan(0L)); Map termsBreakdown = termsAggResult.getTimeBreakdown(); assertThat(termsBreakdown, notNullValue()); - assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (termsAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(termsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); 
+ } else { + assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(termsBreakdown.get(COLLECT), greaterThan(0L)); assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); @@ -245,7 +305,12 @@ public void testMultiLevelProfile() { assertThat(avgAggResult.getTime(), greaterThan(0L)); Map avgBreakdown = termsAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (avgAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); @@ -298,7 +363,12 @@ public void testMultiLevelProfileBreadthFirst() { assertThat(histoAggResult.getTime(), greaterThan(0L)); Map histoBreakdown = histoAggResult.getTimeBreakdown(); assertThat(histoBreakdown, notNullValue()); - assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (histoAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(histoBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); assertThat(histoBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); @@ -316,7 +386,12 @@ public void testMultiLevelProfileBreadthFirst() { assertThat(termsAggResult.getTime(), greaterThan(0L)); Map termsBreakdown = termsAggResult.getTimeBreakdown(); assertThat(termsBreakdown, notNullValue()); - assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (termsAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(termsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(termsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(termsBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(termsBreakdown.get(COLLECT), greaterThan(0L)); assertThat(termsBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); @@ -331,7 +406,12 @@ public void testMultiLevelProfileBreadthFirst() { assertThat(avgAggResult.getTime(), greaterThan(0L)); Map avgBreakdown = avgAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (avgAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); assertThat(avgBreakdown.get(BUILD_AGGREGATION), greaterThan(0L)); @@ -369,7 +449,12 @@ public void testDiversifiedAggProfile() { assertThat(diversifyAggResult.getTime(), greaterThan(0L)); Map diversifyBreakdown = diversifyAggResult.getTimeBreakdown(); assertThat(diversifyBreakdown, notNullValue()); - assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (diversifyAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + 
assertThat(diversifyBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(diversifyBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L)); @@ -386,7 +471,12 @@ public void testDiversifiedAggProfile() { assertThat(maxAggResult.getTime(), greaterThan(0L)); Map maxBreakdown = maxAggResult.getTimeBreakdown(); assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (maxAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(diversifyBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(diversifyBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(diversifyBreakdown.get(COLLECT), greaterThan(0L)); @@ -439,7 +529,12 @@ public void testComplexProfile() { assertThat(histoAggResult.getTime(), greaterThan(0L)); Map histoBreakdown = histoAggResult.getTimeBreakdown(); assertThat(histoBreakdown, notNullValue()); - assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (histoAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(histoBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(histoBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(histoBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(histoBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(histoBreakdown.get(COLLECT), greaterThan(0L)); @@ -462,7 +557,12 @@ public void testComplexProfile() { assertThat(tagsAggResult.getTime(), greaterThan(0L)); Map tagsBreakdown = tagsAggResult.getTimeBreakdown(); assertThat(tagsBreakdown, notNullValue()); - assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (tagsAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(tagsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L)); @@ -482,7 +582,12 @@ public void testComplexProfile() { assertThat(avgAggResult.getTime(), greaterThan(0L)); Map avgBreakdown = avgAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (avgAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); @@ -498,7 +603,12 @@ public void testComplexProfile() { assertThat(maxAggResult.getTime(), greaterThan(0L)); Map maxBreakdown = maxAggResult.getTimeBreakdown(); assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (maxAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + 
assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); @@ -514,7 +624,12 @@ public void testComplexProfile() { assertThat(stringsAggResult.getTime(), greaterThan(0L)); Map stringsBreakdown = stringsAggResult.getTimeBreakdown(); assertThat(stringsBreakdown, notNullValue()); - assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (stringsAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(stringsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(stringsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(stringsBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(stringsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(stringsBreakdown.get(COLLECT), greaterThan(0L)); @@ -534,7 +649,12 @@ public void testComplexProfile() { assertThat(avgAggResult.getTime(), greaterThan(0L)); avgBreakdown = avgAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (avgAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(avgBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); @@ -550,7 +670,12 @@ public void testComplexProfile() { assertThat(maxAggResult.getTime(), greaterThan(0L)); maxBreakdown = maxAggResult.getTimeBreakdown(); assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (maxAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); @@ -567,7 +692,12 @@ public void testComplexProfile() { assertThat(tagsAggResult.getTime(), greaterThan(0L)); tagsBreakdown = tagsAggResult.getTimeBreakdown(); assertThat(tagsBreakdown, notNullValue()); - assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (tagsAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(tagsBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(tagsBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(tagsBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(tagsBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(tagsBreakdown.get(COLLECT), greaterThan(0L)); @@ -587,7 +717,12 @@ public void testComplexProfile() { assertThat(avgAggResult.getTime(), greaterThan(0L)); avgBreakdown = avgAggResult.getTimeBreakdown(); assertThat(avgBreakdown, notNullValue()); - assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (avgAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(avgBreakdown.keySet(), 
equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(avgBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(avgBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(avgBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(avgBreakdown.get(COLLECT), greaterThan(0L)); @@ -603,7 +738,12 @@ public void testComplexProfile() { assertThat(maxAggResult.getTime(), greaterThan(0L)); maxBreakdown = maxAggResult.getTimeBreakdown(); assertThat(maxBreakdown, notNullValue()); - assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + if (maxAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertThat(maxBreakdown.keySet(), equalTo(CONCURRENT_SEARCH_BREAKDOWN_KEYS)); + } else { + assertThat(maxBreakdown.keySet(), equalTo(BREAKDOWN_KEYS)); + } assertThat(maxBreakdown.get(INITIALIZE), greaterThan(0L)); assertThat(maxBreakdown.get(BUILD_LEAF_COLLECTOR), greaterThan(0L)); assertThat(maxBreakdown.get(COLLECT), greaterThan(0L)); @@ -700,7 +840,12 @@ public void testGlobalAggWithStatsSubAggregatorProfile() { assertThat(globalAggResult.getTime(), greaterThan(0L)); Map breakdown = globalAggResult.getTimeBreakdown(); assertThat(breakdown, notNullValue()); - assertEquals(BREAKDOWN_KEYS, breakdown.keySet()); + if (globalAggResult.getMaxSliceTime() != null) { + // concurrent segment search enabled + assertEquals(CONCURRENT_SEARCH_BREAKDOWN_KEYS, breakdown.keySet()); + } else { + assertEquals(BREAKDOWN_KEYS, breakdown.keySet()); + } assertThat(breakdown.get(INITIALIZE), greaterThan(0L)); assertThat(breakdown.get(COLLECT), greaterThan(0L)); assertThat(breakdown.get(BUILD_AGGREGATION).longValue(), greaterThan(0L)); diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index c9c70ed52c376..0259731992f2d 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1270,7 +1270,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc context.minimumScore(source.minScore()); } if (source.profile()) { - context.setProfilers(new Profilers(context.searcher())); + context.setProfilers(new Profilers(context.searcher(), context.isConcurrentSegmentSearchEnabled())); } if (source.timeout() != null) { context.timeout(source.timeout()); diff --git a/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java index a29d4f9a0ee20..67ab062c0e3ca 100644 --- a/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java @@ -50,8 +50,10 @@ public abstract class AbstractProfileBreakdown> { /** * The accumulated timings for this query node */ - private final Timer[] timings; - private final T[] timingTypes; + protected final Timer[] timings; + protected final T[] timingTypes; + public static final String TIMING_TYPE_COUNT_SUFFIX = "_count"; + public static final String TIMING_TYPE_START_TIME_SUFFIX = "_start_time"; /** Sole constructor. 
*/ public AbstractProfileBreakdown(Class clazz) { @@ -74,17 +76,10 @@ public void setTimer(T timing, Timer timer) { * Build a timing count breakdown for current instance */ public Map toBreakdownMap() { - return buildBreakdownMap(this); - } - - /** - * Build a timing count breakdown for arbitrary instance - */ - protected final Map buildBreakdownMap(AbstractProfileBreakdown breakdown) { - Map map = new HashMap<>(breakdown.timings.length * 2); - for (T timingType : breakdown.timingTypes) { - map.put(timingType.toString(), breakdown.timings[timingType.ordinal()].getApproximateTiming()); - map.put(timingType.toString() + "_count", breakdown.timings[timingType.ordinal()].getCount()); + Map map = new HashMap<>(this.timings.length * 3); + for (T timingType : this.timingTypes) { + map.put(timingType.toString(), this.timings[timingType.ordinal()].getApproximateTiming()); + map.put(timingType + TIMING_TYPE_COUNT_SUFFIX, this.timings[timingType.ordinal()].getCount()); } return Collections.unmodifiableMap(map); } diff --git a/server/src/main/java/org/opensearch/search/profile/ProfileResult.java b/server/src/main/java/org/opensearch/search/profile/ProfileResult.java index 89c3d7504de66..d96db1d2dd8da 100644 --- a/server/src/main/java/org/opensearch/search/profile/ProfileResult.java +++ b/server/src/main/java/org/opensearch/search/profile/ProfileResult.java @@ -32,6 +32,7 @@ package org.opensearch.search.profile; +import org.opensearch.Version; import org.opensearch.core.ParseField; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; @@ -44,8 +45,10 @@ import java.io.IOException; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.LinkedHashMap; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -69,7 +72,13 @@ public final class ProfileResult implements Writeable, ToXContentObject { static final ParseField BREAKDOWN = new ParseField("breakdown"); static final ParseField DEBUG = new ParseField("debug"); static final ParseField NODE_TIME = new ParseField("time"); + static final ParseField MAX_SLICE_NODE_TIME = new ParseField("max_slice_time"); + static final ParseField MIN_SLICE_NODE_TIME = new ParseField("min_slice_time"); + static final ParseField AVG_SLICE_NODE_TIME = new ParseField("avg_slice_time"); static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos"); + static final ParseField MAX_SLICE_NODE_TIME_RAW = new ParseField("max_slice_time_in_nanos"); + static final ParseField MIN_SLICE_NODE_TIME_RAW = new ParseField("min_slice_time_in_nanos"); + static final ParseField AVG_SLICE_NODE_TIME_RAW = new ParseField("avg_slice_time_in_nanos"); static final ParseField CHILDREN = new ParseField("children"); private final String type; @@ -77,6 +86,9 @@ public final class ProfileResult implements Writeable, ToXContentObject { private final Map breakdown; private final Map debug; private final long nodeTime; + private Long maxSliceNodeTime; + private Long minSliceNodeTime; + private Long avgSliceNodeTime; private final List children; public ProfileResult( @@ -86,6 +98,20 @@ public ProfileResult( Map debug, long nodeTime, List children + ) { + this(type, description, breakdown, debug, nodeTime, children, null, null, null); + } + + public ProfileResult( + String type, + String description, + Map breakdown, + Map debug, + long nodeTime, + List children, + Long maxSliceNodeTime, + Long minSliceNodeTime, + Long avgSliceNodeTime ) { this.type = type; 
this.description = description; @@ -93,6 +119,9 @@ public ProfileResult( this.debug = debug == null ? Map.of() : debug; this.children = children == null ? List.of() : children; this.nodeTime = nodeTime; + this.maxSliceNodeTime = maxSliceNodeTime; + this.minSliceNodeTime = minSliceNodeTime; + this.avgSliceNodeTime = avgSliceNodeTime; } /** @@ -105,6 +134,15 @@ public ProfileResult(StreamInput in) throws IOException { breakdown = in.readMap(StreamInput::readString, StreamInput::readLong); debug = in.readMap(StreamInput::readString, StreamInput::readGenericValue); children = in.readList(ProfileResult::new); + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + this.maxSliceNodeTime = in.readOptionalLong(); + this.minSliceNodeTime = in.readOptionalLong(); + this.avgSliceNodeTime = in.readOptionalLong(); + } else { + this.maxSliceNodeTime = null; + this.minSliceNodeTime = null; + this.avgSliceNodeTime = null; + } } @Override @@ -115,6 +153,11 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(breakdown, StreamOutput::writeString, StreamOutput::writeLong); out.writeMap(debug, StreamOutput::writeString, StreamOutput::writeGenericValue); out.writeList(children); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + out.writeOptionalLong(maxSliceNodeTime); + out.writeOptionalLong(minSliceNodeTime); + out.writeOptionalLong(avgSliceNodeTime); + } } /** @@ -154,6 +197,18 @@ public long getTime() { return nodeTime; } + public Long getMaxSliceTime() { + return maxSliceNodeTime; + } + + public Long getMinSliceTime() { + return minSliceNodeTime; + } + + public Long getAvgSliceTime() { + return avgSliceNodeTime; + } + /** * Returns a list of all profiled children queries */ @@ -168,9 +223,27 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(DESCRIPTION.getPreferredName(), description); if (builder.humanReadable()) { builder.field(NODE_TIME.getPreferredName(), new TimeValue(getTime(), TimeUnit.NANOSECONDS).toString()); + if (getMaxSliceTime() != null) { + builder.field(MAX_SLICE_NODE_TIME.getPreferredName(), new TimeValue(getMaxSliceTime(), TimeUnit.NANOSECONDS).toString()); + } + if (getMinSliceTime() != null) { + builder.field(MIN_SLICE_NODE_TIME.getPreferredName(), new TimeValue(getMinSliceTime(), TimeUnit.NANOSECONDS).toString()); + } + if (getAvgSliceTime() != null) { + builder.field(AVG_SLICE_NODE_TIME.getPreferredName(), new TimeValue(getAvgSliceTime(), TimeUnit.NANOSECONDS).toString()); + } } builder.field(NODE_TIME_RAW.getPreferredName(), getTime()); - builder.field(BREAKDOWN.getPreferredName(), breakdown); + if (getMaxSliceTime() != null) { + builder.field(MAX_SLICE_NODE_TIME_RAW.getPreferredName(), getMaxSliceTime()); + } + if (getMinSliceTime() != null) { + builder.field(MIN_SLICE_NODE_TIME_RAW.getPreferredName(), getMinSliceTime()); + } + if (getAvgSliceTime() != null) { + builder.field(AVG_SLICE_NODE_TIME_RAW.getPreferredName(), getAvgSliceTime()); + } + createBreakdownView(builder); if (false == debug.isEmpty()) { builder.field(DEBUG.getPreferredName(), debug); } @@ -186,6 +259,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder.endObject(); } + private void createBreakdownView(XContentBuilder builder) throws IOException { + Map modifiedBreakdown = new LinkedHashMap<>(breakdown); + removeStartTimeFields(modifiedBreakdown); + builder.field(BREAKDOWN.getPreferredName(), modifiedBreakdown); + } + + static void removeStartTimeFields(Map modifiedBreakdown) { + Iterator> 
iterator = modifiedBreakdown.entrySet().iterator(); + while (iterator.hasNext()) { + Map.Entry entry = iterator.next(); + if (entry.getKey().endsWith(AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX)) { + iterator.remove(); + } + } + } + private static final InstantiatingObjectParser PARSER; static { InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( @@ -199,6 +288,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), DEBUG); parser.declareLong(constructorArg(), NODE_TIME_RAW); parser.declareObjectArray(optionalConstructorArg(), (p, c) -> fromXContent(p), CHILDREN); + parser.declareLong(optionalConstructorArg(), MAX_SLICE_NODE_TIME_RAW); + parser.declareLong(optionalConstructorArg(), MIN_SLICE_NODE_TIME_RAW); + parser.declareLong(optionalConstructorArg(), AVG_SLICE_NODE_TIME_RAW); PARSER = parser.build(); } diff --git a/server/src/main/java/org/opensearch/search/profile/Profilers.java b/server/src/main/java/org/opensearch/search/profile/Profilers.java index 2bc2f3a5a3920..8e87c7ff4acd4 100644 --- a/server/src/main/java/org/opensearch/search/profile/Profilers.java +++ b/server/src/main/java/org/opensearch/search/profile/Profilers.java @@ -34,6 +34,7 @@ import org.opensearch.search.internal.ContextIndexSearcher; import org.opensearch.search.profile.aggregation.AggregationProfiler; +import org.opensearch.search.profile.aggregation.ConcurrentAggregationProfiler; import org.opensearch.search.profile.query.QueryProfiler; import java.util.ArrayList; @@ -50,18 +51,20 @@ public final class Profilers { private final ContextIndexSearcher searcher; private final List queryProfilers; private final AggregationProfiler aggProfiler; + private final boolean isConcurrentSegmentSearchEnabled; /** Sole constructor. This {@link Profilers} instance will initially wrap one {@link QueryProfiler}. */ - public Profilers(ContextIndexSearcher searcher) { + public Profilers(ContextIndexSearcher searcher, boolean isConcurrentSegmentSearchEnabled) { this.searcher = searcher; + this.isConcurrentSegmentSearchEnabled = isConcurrentSegmentSearchEnabled; this.queryProfilers = new ArrayList<>(); - this.aggProfiler = new AggregationProfiler(); + this.aggProfiler = isConcurrentSegmentSearchEnabled ? new ConcurrentAggregationProfiler() : new AggregationProfiler(); addQueryProfiler(); } /** Switch to a new profile. 
*/ public QueryProfiler addQueryProfiler() { - QueryProfiler profiler = new QueryProfiler(searcher.getExecutor() != null); + QueryProfiler profiler = new QueryProfiler(isConcurrentSegmentSearchEnabled); searcher.setProfiler(profiler); queryProfilers.add(profiler); return profiler; diff --git a/server/src/main/java/org/opensearch/search/profile/Timer.java b/server/src/main/java/org/opensearch/search/profile/Timer.java index 231324b4a5598..172762cabeb6a 100644 --- a/server/src/main/java/org/opensearch/search/profile/Timer.java +++ b/server/src/main/java/org/opensearch/search/profile/Timer.java @@ -51,7 +51,7 @@ public class Timer { private boolean doTiming; - private long timing, count, lastCount, start; + private long timing, count, lastCount, start, earliestTimerStartTime; /** pkg-private for testing */ long nanoTime() { @@ -71,6 +71,9 @@ public final void start() { doTiming = (count - lastCount) >= Math.min(lastCount >>> 8, 1024); if (doTiming) { start = nanoTime(); + if (count == 0) { + earliestTimerStartTime = start; + } } count++; } @@ -92,6 +95,14 @@ public final long getCount() { return count; } + /** Return the timer start time in nanoseconds.*/ + public final long getEarliestTimerStartTime() { + if (start != 0) { + throw new IllegalStateException("#start call misses a matching #stop call"); + } + return earliestTimerStartTime; + } + /** Return an approximation of the total time spent between consecutive calls of #start and #stop. */ public final long getApproximateTiming() { if (start != 0) { diff --git a/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java index 24eccba575e77..d0c67915e6d8d 100644 --- a/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java @@ -34,6 +34,7 @@ import org.opensearch.search.profile.AbstractProfileBreakdown; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -62,4 +63,18 @@ public void addDebugInfo(String key, Object value) { protected Map toDebugMap() { return unmodifiableMap(extra); } + + /** + * Build a timing count startTime breakdown for aggregation timing types + */ + @Override + public Map toBreakdownMap() { + Map map = new HashMap<>(timings.length * 3); + for (AggregationTimingType timingType : timingTypes) { + map.put(timingType.toString(), timings[timingType.ordinal()].getApproximateTiming()); + map.put(timingType + TIMING_TYPE_COUNT_SUFFIX, timings[timingType.ordinal()].getCount()); + map.put(timingType + TIMING_TYPE_START_TIME_SUFFIX, timings[timingType.ordinal()].getEarliestTimerStartTime()); + } + return Collections.unmodifiableMap(map); + } } diff --git a/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfiler.java b/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfiler.java index 1d2cf424ee5a7..39620c25dc5a3 100644 --- a/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfiler.java +++ b/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfiler.java @@ -36,8 +36,6 @@ import org.opensearch.search.profile.AbstractProfiler; import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; import java.util.Map; /** @@ -47,29 +45,25 @@ */ public class AggregationProfiler extends AbstractProfiler { - private final 
Map, AggregationProfileBreakdown> profileBreakdownLookup = new HashMap<>(); + private final Map profileBreakdownLookup = new HashMap<>(); public AggregationProfiler() { super(new InternalAggregationProfileTree()); } + /** + * This method does not need to be thread safe for concurrent search use case as well. + * The {@link AggregationProfileBreakdown} for each Aggregation operator is created in sync path when + * {@link org.opensearch.search.aggregations.BucketCollector#preCollection()} is called + * on the Aggregation collector instances during construction. + */ @Override public AggregationProfileBreakdown getQueryBreakdown(Aggregator agg) { - List path = getAggregatorPath(agg); - AggregationProfileBreakdown aggregationProfileBreakdown = profileBreakdownLookup.get(path); + AggregationProfileBreakdown aggregationProfileBreakdown = profileBreakdownLookup.get(agg); if (aggregationProfileBreakdown == null) { aggregationProfileBreakdown = super.getQueryBreakdown(agg); - profileBreakdownLookup.put(path, aggregationProfileBreakdown); + profileBreakdownLookup.put(agg, aggregationProfileBreakdown); } return aggregationProfileBreakdown; } - - public static List getAggregatorPath(Aggregator agg) { - LinkedList path = new LinkedList<>(); - while (agg != null) { - path.addFirst(agg.name()); - agg = agg.parent(); - } - return path; - } } diff --git a/server/src/main/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfiler.java b/server/src/main/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfiler.java new file mode 100644 index 0000000000000..7c9bf55a97de5 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfiler.java @@ -0,0 +1,199 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.search.profile.aggregation; + +import org.opensearch.search.profile.ProfileResult; +import org.opensearch.search.profile.AbstractProfileBreakdown; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +/** + * Main class to profile aggregations with concurrent execution + * + * @opensearch.internal + */ +public class ConcurrentAggregationProfiler extends AggregationProfiler { + + private static final String MAX_PREFIX = "max_"; + private static final String MIN_PREFIX = "min_"; + private static final String AVG_PREFIX = "avg_"; + private static final String START_TIME_KEY = AggregationTimingType.INITIALIZE + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; + private static final String[] breakdownCountStatsTypes = { "build_leaf_collector_count", "collect_count" }; + + @Override + public List getTree() { + List tree = profileTree.getTree(); + List reducedTree = new LinkedList<>(); + Map> sliceLevelAggregationMap = getSliceLevelAggregationMap(tree); + for (List profileResultsAcrossSlices : sliceLevelAggregationMap.values()) { + reducedTree.addAll(reduceProfileResultsTree(profileResultsAcrossSlices)); + } + return reducedTree; + } + + private List reduceProfileResultsTree(List profileResultsAcrossSlices) { + String type = profileResultsAcrossSlices.get(0).getQueryName(); + String description = profileResultsAcrossSlices.get(0).getLuceneDescription(); + long maxSliceNodeEndTime = Long.MIN_VALUE; + long minSliceNodeStartTime = Long.MAX_VALUE; + long maxSliceNodeTime = Long.MIN_VALUE; + long minSliceNodeTime = Long.MAX_VALUE; + long avgSliceNodeTime = 0L; + Map breakdown = new HashMap<>(); + Map timeStatsMap = new HashMap<>(); + Map minSliceStartTimeMap = new HashMap<>(); + Map maxSliceEndTimeMap = new HashMap<>(); + Map countStatsMap = new HashMap<>(); + Map debug = new HashMap<>(); + List children = new LinkedList<>(); + + for (ProfileResult profileResult : profileResultsAcrossSlices) { + long profileNodeTime = profileResult.getTime(); + long sliceStartTime = profileResult.getTimeBreakdown().get(START_TIME_KEY); + + // Profiled total time + maxSliceNodeEndTime = Math.max(maxSliceNodeEndTime, sliceStartTime + profileNodeTime); + minSliceNodeStartTime = Math.min(minSliceNodeStartTime, sliceStartTime); + + // Profiled total time stats + maxSliceNodeTime = Math.max(maxSliceNodeTime, profileNodeTime); + minSliceNodeTime = Math.min(minSliceNodeTime, profileNodeTime); + avgSliceNodeTime += profileNodeTime; + + // Profiled breakdown time stats + for (AggregationTimingType timingType : AggregationTimingType.values()) { + buildBreakdownStatsMap(timeStatsMap, profileResult, timingType.toString()); + } + + // Profiled breakdown total time + for (AggregationTimingType timingType : AggregationTimingType.values()) { + String breakdownTimingType = timingType.toString(); + Long startTime = profileResult.getTimeBreakdown() + .get(breakdownTimingType + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX); + Long endTime = startTime + profileResult.getTimeBreakdown().get(breakdownTimingType); + minSliceStartTimeMap.put( + breakdownTimingType, + Math.min(minSliceStartTimeMap.getOrDefault(breakdownTimingType, Long.MAX_VALUE), startTime) + ); + maxSliceEndTimeMap.put( + breakdownTimingType, + Math.max(maxSliceEndTimeMap.getOrDefault(breakdownTimingType, Long.MIN_VALUE), endTime) + ); + } + + // Profiled breakdown count stats + for (String breakdownCountType : breakdownCountStatsTypes) { + 
buildBreakdownStatsMap(countStatsMap, profileResult, breakdownCountType); + } + + // Profiled breakdown count + for (AggregationTimingType timingType : AggregationTimingType.values()) { + String breakdownType = timingType.toString(); + String breakdownTypeCount = breakdownType + AbstractProfileBreakdown.TIMING_TYPE_COUNT_SUFFIX; + breakdown.put( + breakdownTypeCount, + breakdown.getOrDefault(breakdownTypeCount, 0L) + profileResult.getTimeBreakdown().get(breakdownTypeCount) + ); + } + + debug = profileResult.getDebugInfo(); + children.addAll(profileResult.getProfiledChildren()); + } + // nodeTime + long nodeTime = maxSliceNodeEndTime - minSliceNodeStartTime; + avgSliceNodeTime /= profileResultsAcrossSlices.size(); + + // Profiled breakdown time stats + for (AggregationTimingType breakdownTimingType : AggregationTimingType.values()) { + buildBreakdownMap(profileResultsAcrossSlices.size(), breakdown, timeStatsMap, breakdownTimingType.toString()); + } + + // Profiled breakdown total time + for (AggregationTimingType breakdownTimingType : AggregationTimingType.values()) { + String breakdownType = breakdownTimingType.toString(); + breakdown.put(breakdownType, maxSliceEndTimeMap.get(breakdownType) - minSliceStartTimeMap.get(breakdownType)); + } + + // Profiled breakdown count stats + for (String breakdownCountType : breakdownCountStatsTypes) { + buildBreakdownMap(profileResultsAcrossSlices.size(), breakdown, countStatsMap, breakdownCountType); + } + + // children + List reducedChildrenTree = new LinkedList<>(); + if (!children.isEmpty()) { + Map> sliceLevelAggregationMap = getSliceLevelAggregationMap(children); + for (List profileResults : sliceLevelAggregationMap.values()) { + reducedChildrenTree.addAll(reduceProfileResultsTree(profileResults)); + } + } + + ProfileResult reducedResult = new ProfileResult( + type, + description, + breakdown, + debug, + nodeTime, + reducedChildrenTree, + maxSliceNodeTime, + minSliceNodeTime, + avgSliceNodeTime + ); + return List.of(reducedResult); + } + + static void buildBreakdownMap(int treeSize, Map breakdown, Map statsMap, String breakdownType) { + String maxBreakdownType = MAX_PREFIX + breakdownType; + String minBreakdownType = MIN_PREFIX + breakdownType; + String avgBreakdownType = AVG_PREFIX + breakdownType; + breakdown.put(maxBreakdownType, statsMap.get(maxBreakdownType)); + breakdown.put(minBreakdownType, statsMap.get(minBreakdownType)); + breakdown.put(avgBreakdownType, statsMap.get(avgBreakdownType) / treeSize); + } + + static void buildBreakdownStatsMap(Map statsMap, ProfileResult result, String breakdownType) { + String maxBreakdownType = MAX_PREFIX + breakdownType; + String minBreakdownType = MIN_PREFIX + breakdownType; + String avgBreakdownType = AVG_PREFIX + breakdownType; + statsMap.put( + maxBreakdownType, + Math.max(statsMap.getOrDefault(maxBreakdownType, Long.MIN_VALUE), result.getTimeBreakdown().get(breakdownType)) + ); + statsMap.put( + minBreakdownType, + Math.min(statsMap.getOrDefault(minBreakdownType, Long.MAX_VALUE), result.getTimeBreakdown().get(breakdownType)) + ); + statsMap.put(avgBreakdownType, statsMap.getOrDefault(avgBreakdownType, 0L) + result.getTimeBreakdown().get(breakdownType)); + } + + /** + * @return a slice level aggregation map where the key is the description of the aggregation and + * the value is a list of ProfileResult across all slices. 
+ */ + static Map> getSliceLevelAggregationMap(List tree) { + Map> sliceLevelAggregationMap = new HashMap<>(); + for (ProfileResult result : tree) { + String description = result.getLuceneDescription(); + final List sliceLevelAggregationList = sliceLevelAggregationMap.computeIfAbsent( + description, + k -> new LinkedList<>() + ); + sliceLevelAggregationList.add(result); + } + return sliceLevelAggregationMap; + } +} diff --git a/server/src/main/java/org/opensearch/search/profile/aggregation/InternalAggregationProfileTree.java b/server/src/main/java/org/opensearch/search/profile/aggregation/InternalAggregationProfileTree.java index 36cfc53f41ccd..34716b87c7c9c 100644 --- a/server/src/main/java/org/opensearch/search/profile/aggregation/InternalAggregationProfileTree.java +++ b/server/src/main/java/org/opensearch/search/profile/aggregation/InternalAggregationProfileTree.java @@ -62,6 +62,10 @@ protected String getTypeFromElement(Aggregator element) { return element.getClass().getSimpleName(); } + /** + * @return is used to group aggregations with same name across slices. + * So the name returned here should be same across slices for an aggregation operator. + */ @Override protected String getDescriptionFromElement(Aggregator element) { return element.name(); diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index 74ef78bc93c5f..6f0c78e8b307d 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -44,10 +44,10 @@ public AbstractProfileBreakdown context(Object context) { @Override public Map toBreakdownMap() { - final Map map = new HashMap<>(buildBreakdownMap(this)); + final Map map = new HashMap<>(super.toBreakdownMap()); for (final AbstractProfileBreakdown context : contexts.values()) { - for (final Map.Entry entry : buildBreakdownMap(context).entrySet()) { + for (final Map.Entry entry : context.toBreakdownMap().entrySet()) { map.merge(entry.getKey(), entry.getValue(), Long::sum); } } diff --git a/server/src/test/java/org/opensearch/search/profile/ProfileResultTests.java b/server/src/test/java/org/opensearch/search/profile/ProfileResultTests.java index ae5a07478e814..70b876b41ba08 100644 --- a/server/src/test/java/org/opensearch/search/profile/ProfileResultTests.java +++ b/server/src/test/java/org/opensearch/search/profile/ProfileResultTests.java @@ -56,7 +56,7 @@ public class ProfileResultTests extends OpenSearchTestCase { - public static ProfileResult createTestItem(int depth) { + public static ProfileResult createTestItem(int depth, boolean concurrentSegmentSearchEnabled) { String type = randomAlphaOfLengthBetween(5, 10); String description = randomAlphaOfLengthBetween(5, 10); int breakdownsSize = randomIntBetween(0, 5); @@ -77,13 +77,28 @@ public static ProfileResult createTestItem(int depth) { int childrenSize = depth > 0 ? 
randomIntBetween(0, 1) : 0; List children = new ArrayList<>(childrenSize); for (int i = 0; i < childrenSize; i++) { - children.add(createTestItem(depth - 1)); + children.add(createTestItem(depth - 1, concurrentSegmentSearchEnabled)); + } + if (concurrentSegmentSearchEnabled) { + return new ProfileResult( + type, + description, + breakdown, + debug, + randomNonNegativeLong(), + children, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); + } else { + return new ProfileResult(type, description, breakdown, debug, randomNonNegativeLong(), children); } - return new ProfileResult(type, description, breakdown, debug, randomNonNegativeLong(), children); } public void testFromXContent() throws IOException { - doFromXContentTestWithRandomFields(false); + doFromXContentTestWithRandomFields(false, false); + doFromXContentTestWithRandomFields(false, true); } /** @@ -91,11 +106,12 @@ public void testFromXContent() throws IOException { * back to be forward compatible with additions to the xContent */ public void testFromXContentWithRandomFields() throws IOException { - doFromXContentTestWithRandomFields(true); + doFromXContentTestWithRandomFields(true, false); + doFromXContentTestWithRandomFields(true, true); } - private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException { - ProfileResult profileResult = createTestItem(2); + private void doFromXContentTestWithRandomFields(boolean addRandomFields, boolean concurrentSegmentSearchEnabled) throws IOException { + ProfileResult profileResult = createTestItem(2, concurrentSegmentSearchEnabled); XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); @@ -116,6 +132,9 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertNull(parser.nextToken()); } assertEquals(profileResult.getTime(), parsed.getTime()); + assertEquals(profileResult.getMaxSliceTime(), parsed.getMaxSliceTime()); + assertEquals(profileResult.getMinSliceTime(), parsed.getMinSliceTime()); + assertEquals(profileResult.getAvgSliceTime(), parsed.getAvgSliceTime()); assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); } @@ -238,5 +257,72 @@ public void testToXContent() throws IOException { + "}", Strings.toString(builder) ); + + result = new ProfileResult("profileName", "some description", Map.of("key1", 1234L), Map.of(), 1234L, List.of(), 321L, 123L, 222L); + builder = XContentFactory.jsonBuilder().prettyPrint(); + result.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertEquals( + "{\n" + + " \"type\" : \"profileName\",\n" + + " \"description\" : \"some description\",\n" + + " \"time_in_nanos\" : 1234,\n" + + " \"max_slice_time_in_nanos\" : 321,\n" + + " \"min_slice_time_in_nanos\" : 123,\n" + + " \"avg_slice_time_in_nanos\" : 222,\n" + + " \"breakdown\" : {\n" + + " \"key1\" : 1234\n" + + " }\n" + + "}", + Strings.toString(builder) + ); + + result = new ProfileResult( + "profileName", + "some description", + Map.of("key1", 1234567890L), + Map.of(), + 1234567890L, + List.of(), + 87654321L, + 12345678L, + 54637281L + ); + builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true); + result.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertEquals( + "{\n" + + " \"type\" : \"profileName\",\n" + + " \"description\" : \"some description\",\n" + + " \"time\" : 
\"1.2s\",\n" + + " \"max_slice_time\" : \"87.6ms\",\n" + + " \"min_slice_time\" : \"12.3ms\",\n" + + " \"avg_slice_time\" : \"54.6ms\",\n" + + " \"time_in_nanos\" : 1234567890,\n" + + " \"max_slice_time_in_nanos\" : 87654321,\n" + + " \"min_slice_time_in_nanos\" : 12345678,\n" + + " \"avg_slice_time_in_nanos\" : 54637281,\n" + + " \"breakdown\" : {\n" + + " \"key1\" : 1234567890\n" + + " }\n" + + "}", + Strings.toString(builder) + ); + + } + + public void testRemoveStartTimeFields() { + Map breakdown = new HashMap<>(); + breakdown.put("initialize_start_time", 123456L); + breakdown.put("initialize_count", 1L); + breakdown.put("initialize", 654321L); + Map modifiedBreakdown = new LinkedHashMap<>(breakdown); + assertEquals(3, modifiedBreakdown.size()); + assertEquals(123456L, (long) modifiedBreakdown.get("initialize_start_time")); + assertEquals(1L, (long) modifiedBreakdown.get("initialize_count")); + assertEquals(654321L, (long) modifiedBreakdown.get("initialize")); + ProfileResult.removeStartTimeFields(modifiedBreakdown); + assertFalse(modifiedBreakdown.containsKey("initialize_start_time")); + assertTrue(modifiedBreakdown.containsKey("initialize_count")); + assertTrue(modifiedBreakdown.containsKey("initialize")); } } diff --git a/server/src/test/java/org/opensearch/search/profile/TimerTests.java b/server/src/test/java/org/opensearch/search/profile/TimerTests.java index deed451c21933..5997292eb8f56 100644 --- a/server/src/test/java/org/opensearch/search/profile/TimerTests.java +++ b/server/src/test/java/org/opensearch/search/profile/TimerTests.java @@ -71,10 +71,14 @@ long nanoTime() { return time += 42; } }; - for (int i = 1; i < 100000; ++i) { + t.start(); + t.stop(); + long timerStartTime = t.getEarliestTimerStartTime(); + for (int i = 2; i < 100000; ++i) { t.start(); t.stop(); assertEquals(i, t.getCount()); + assertEquals(timerStartTime, t.getEarliestTimerStartTime()); // Make sure the cumulated timing is 42 times the number of calls as expected assertEquals(i * 42L, t.getApproximateTiming()); } diff --git a/server/src/test/java/org/opensearch/search/profile/aggregation/AggregationProfileShardResultTests.java b/server/src/test/java/org/opensearch/search/profile/aggregation/AggregationProfileShardResultTests.java index 33c95725dcd13..75fb846909aa1 100644 --- a/server/src/test/java/org/opensearch/search/profile/aggregation/AggregationProfileShardResultTests.java +++ b/server/src/test/java/org/opensearch/search/profile/aggregation/AggregationProfileShardResultTests.java @@ -57,7 +57,7 @@ public static AggregationProfileShardResult createTestItem(int depth) { int size = randomIntBetween(0, 5); List aggProfileResults = new ArrayList<>(size); for (int i = 0; i < size; i++) { - aggProfileResults.add(ProfileResultTests.createTestItem(1)); + aggProfileResults.add(ProfileResultTests.createTestItem(depth, false)); } return new AggregationProfileShardResult(aggProfileResults); } diff --git a/server/src/test/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfilerTests.java new file mode 100644 index 0000000000000..e36b65f0a7b69 --- /dev/null +++ b/server/src/test/java/org/opensearch/search/profile/aggregation/ConcurrentAggregationProfilerTests.java @@ -0,0 +1,182 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.search.profile.aggregation; + +import org.opensearch.search.profile.ProfileResult; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +public class ConcurrentAggregationProfilerTests extends OpenSearchTestCase { + + public static List createConcurrentSearchProfileTree() { + List tree = new ArrayList<>(); + // Aggregation + tree.add( + new ProfileResult( + "NumericTermsAggregator", + "test_scoped_agg", + new LinkedHashMap<>(), + new HashMap<>(), + 10847417L, + List.of( + new ProfileResult( + "GlobalOrdinalsStringTermsAggregator", + "test_terms", + new LinkedHashMap<>(), + new HashMap<>(), + 3359835L, + List.of(), + 1490667L, + 1180123L, + 1240676L + ) + ), + 94582L, + 18667L, + 211749L + ) + ); + tree.add( + new ProfileResult( + "NumericTermsAggregator", + "test_scoped_agg", + new LinkedHashMap<>(), + new HashMap<>(), + 10776655L, + List.of( + new ProfileResult( + "GlobalOrdinalsStringTermsAggregator", + "test_terms", + new LinkedHashMap<>(), + new HashMap<>(), + 3359567L, + List.of(), + 1390554L, + 1180321L, + 1298776L + ) + ), + 94560L, + 11237L, + 236440L + ) + ); + // Global Aggregation + tree.add( + new ProfileResult( + "GlobalAggregator", + "test_global_agg", + new LinkedHashMap<>(), + new HashMap<>(), + 19631335L, + List.of(), + 563002L, + 142210L, + 1216631L + ) + ); + tree.add( + new ProfileResult( + "GlobalAggregator", + "test_global_agg", + new LinkedHashMap<>(), + new HashMap<>(), + 19634567L, + List.of(), + 563333L, + 146783L, + 1496600L + ) + ); + return tree; + } + + public void testBuildTimeStatsBreakdownMap() { + List tree = createConcurrentSearchProfileTree(); + Map breakdown = new HashMap<>(); + Map timeStatsMap = new HashMap<>(); + timeStatsMap.put("max_initialize", 30L); + timeStatsMap.put("min_initialize", 10L); + timeStatsMap.put("avg_initialize", 60L); + ConcurrentAggregationProfiler.buildBreakdownMap(tree.size(), breakdown, timeStatsMap, "initialize"); + assertTrue(breakdown.containsKey("max_initialize")); + assertTrue(breakdown.containsKey("min_initialize")); + assertTrue(breakdown.containsKey("avg_initialize")); + assertEquals(30L, (long) breakdown.get("max_initialize")); + assertEquals(10L, (long) breakdown.get("min_initialize")); + assertEquals(15L, (long) breakdown.get("avg_initialize")); + } + + public void testBuildCountStatsBreakdownMap() { + List tree = createConcurrentSearchProfileTree(); + Map breakdown = new HashMap<>(); + Map countStatsMap = new HashMap<>(); + countStatsMap.put("max_collect_count", 3L); + countStatsMap.put("min_collect_count", 1L); + countStatsMap.put("avg_collect_count", 6L); + ConcurrentAggregationProfiler.buildBreakdownMap(tree.size(), breakdown, countStatsMap, "collect_count"); + assertTrue(breakdown.containsKey("max_collect_count")); + assertTrue(breakdown.containsKey("min_collect_count")); + assertTrue(breakdown.containsKey("avg_collect_count")); + assertEquals(3L, (long) breakdown.get("max_collect_count")); + assertEquals(1L, (long) breakdown.get("min_collect_count")); + assertEquals(1L, (long) breakdown.get("avg_collect_count")); + } + + public void testBuildBreakdownStatsMap() { + Map statsMap = new HashMap<>(); + ConcurrentAggregationProfiler.buildBreakdownStatsMap( + statsMap, + new ProfileResult("NumericTermsAggregator", "desc", Map.of("initialize", 100L), 
Map.of(), 130L, List.of()), + "initialize" + ); + assertTrue(statsMap.containsKey("max_initialize")); + assertTrue(statsMap.containsKey("min_initialize")); + assertTrue(statsMap.containsKey("avg_initialize")); + assertEquals(100L, (long) statsMap.get("max_initialize")); + assertEquals(100L, (long) statsMap.get("min_initialize")); + assertEquals(100L, (long) statsMap.get("avg_initialize")); + ConcurrentAggregationProfiler.buildBreakdownStatsMap( + statsMap, + new ProfileResult("NumericTermsAggregator", "desc", Map.of("initialize", 50L), Map.of(), 120L, List.of()), + "initialize" + ); + assertEquals(100L, (long) statsMap.get("max_initialize")); + assertEquals(50L, (long) statsMap.get("min_initialize")); + assertEquals(150L, (long) statsMap.get("avg_initialize")); + } + + public void testGetSliceLevelAggregationMap() { + List tree = createConcurrentSearchProfileTree(); + Map> aggregationMap = ConcurrentAggregationProfiler.getSliceLevelAggregationMap(tree); + assertEquals(2, aggregationMap.size()); + assertTrue(aggregationMap.containsKey("test_scoped_agg")); + assertTrue(aggregationMap.containsKey("test_global_agg")); + assertEquals(2, aggregationMap.get("test_scoped_agg").size()); + assertEquals(2, aggregationMap.get("test_global_agg").size()); + for (int slice_id : new int[] { 0, 1 }) { + assertEquals(1, aggregationMap.get("test_scoped_agg").get(slice_id).getProfiledChildren().size()); + assertEquals( + "test_terms", + aggregationMap.get("test_scoped_agg").get(slice_id).getProfiledChildren().get(0).getLuceneDescription() + ); + assertEquals(0, aggregationMap.get("test_global_agg").get(slice_id).getProfiledChildren().size()); + } + } +} diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfileShardResultTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfileShardResultTests.java index e703396f5cf02..b371015621647 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfileShardResultTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfileShardResultTests.java @@ -54,7 +54,7 @@ public static QueryProfileShardResult createTestItem() { int size = randomIntBetween(0, 5); List queryProfileResults = new ArrayList<>(size); for (int i = 0; i < size; i++) { - queryProfileResults.add(ProfileResultTests.createTestItem(1)); + queryProfileResults.add(ProfileResultTests.createTestItem(1, false)); } CollectorResult profileCollector = CollectorResultTests.createTestItem(2, false); long rewriteTime = randomNonNegativeLong(); diff --git a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java index 4e44791e77566..0ce63fbe2977e 100644 --- a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java @@ -160,6 +160,7 @@ public TestSearchContext( this.indexShard = indexShard; this.queryShardContext = queryShardContext; this.searcher = searcher; + this.concurrentSegmentSearchEnabled = searcher != null && (searcher.getExecutor() != null); this.scrollContext = scrollContext; } @@ -686,7 +687,7 @@ public TestSearchContext withCleanQueryResult() { * Add profilers to the query */ public TestSearchContext withProfilers() { - this.profilers = new Profilers(searcher); + this.profilers = new Profilers(searcher, concurrentSegmentSearchEnabled); return this; } } From 0b1f875d34c7d723afd41396234f8f2003c6be2c Mon Sep 17 00:00:00 2001 From: Nick 
Knize Date: Mon, 31 Jul 2023 12:53:38 -0500 Subject: [PATCH 29/75] [Remove] Deprecated Fractional ByteSizeValue support (#9005) This commit removes the deprecation warning for fractional byte size values and, instead, throws an OpenSearchParseException if a user passes in a fractional byte value (e.g., 5.2b). This leniency was deprecated a long time ago in Legacy 6.2, so the removal should come as no surprise to users. Signed-off-by: Nicholas Walter Knize --- CHANGELOG.md | 3 ++- .../ingest/common/BytesProcessorTests.java | 15 ++++++------ .../opensearch/common/unit/ByteSizeValue.java | 24 ++++--------------- .../common/unit/ByteSizeValueTests.java | 10 ++++---- 4 files changed, 19 insertions(+), 33 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7aa7d14a936a8..3967840434902 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -103,6 +103,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - [Refactor] MediaTypeParser to MediaTypeParserRegistry ([#8636](https://github.com/opensearch-project/OpenSearch/pull/8636)) - Create separate SourceLookup instance per segment slice in SignificantTextAggregatorFactory ([#8807](https://github.com/opensearch-project/OpenSearch/pull/8807)) - Add support for aggregation profiler with concurrent aggregation ([#8801](https://github.com/opensearch-project/OpenSearch/pull/8801)) +- [Remove] Deprecated Fractional ByteSizeValue support #9005 ([#9005](https://github.com/opensearch-project/OpenSearch/pull/9005)) ### Deprecated @@ -113,4 +114,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Security [Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.x...HEAD -[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x \ No newline at end of file +[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java index bbd9ff4c8b912..385d77418ee7b 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java @@ -33,6 +33,7 @@ package org.opensearch.ingest.common; import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchParseException; import org.opensearch.common.unit.ByteSizeUnit; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.ingest.IngestDocument; @@ -40,8 +41,6 @@ import org.opensearch.ingest.RandomDocumentPicks; import org.hamcrest.CoreMatchers; -import static org.hamcrest.Matchers.equalTo; - public class BytesProcessorTests extends AbstractStringProcessorTestCase { private String modifiedInput; @@ -101,14 +100,16 @@ public void testMissingUnits() { assertThat(exception.getMessage(), CoreMatchers.containsString("unit is missing or unrecognized")); } - public void testFractional() throws Exception { + public void testFractional() { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "1.1kb"); Processor processor = newProcessor(fieldName, randomBoolean(), fieldName); - processor.execute(ingestDocument); - assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L)); - assertWarnings( - "Fractional bytes
values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " + "[Ingest Field]" + OpenSearchParseException e = expectThrows(OpenSearchParseException.class, () -> processor.execute(ingestDocument)); + assertThat( + e.getMessage(), + CoreMatchers.containsString( + "Fractional bytes values have been deprecated since Legacy 6.2. " + "Use non-fractional bytes values instead:" + ) ); } } diff --git a/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java b/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java index a123c79464727..7343915a52c0c 100644 --- a/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java +++ b/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java @@ -37,9 +37,6 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.common.logging.LogConfigurator; -import org.opensearch.common.network.NetworkService; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; @@ -54,17 +51,6 @@ */ public class ByteSizeValue implements Writeable, Comparable, ToXContentFragment { - /** - * We have to lazy initialize the deprecation logger as otherwise a static logger here would be constructed before logging is configured - * leading to a runtime failure (see {@link LogConfigurator#checkErrorListener()} ). The premature construction would come from any - * {@link ByteSizeValue} object constructed in, for example, settings in {@link NetworkService}. - * - * @opensearch.internal - */ - static class DeprecationLoggerHolder { - static DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ByteSizeValue.class); - } - public static final ByteSizeValue ZERO = new ByteSizeValue(0, ByteSizeUnit.BYTES); private final long size; @@ -262,14 +248,14 @@ private static ByteSizeValue parse( return new ByteSizeValue(Long.parseLong(s), unit); } catch (final NumberFormatException e) { try { - final double doubleValue = Double.parseDouble(s); - DeprecationLoggerHolder.deprecationLogger.deprecate( - "fractional_byte_values", - "Fractional bytes values are deprecated. Use non-fractional bytes values instead: [{}] found for setting [{}]", + Double.parseDouble(s); + throw new OpenSearchParseException( + "Failed to parse bytes value [{}]. Fractional bytes values have been " + + "deprecated since Legacy 6.2. 
Use non-fractional bytes values instead: found for setting [{}]", + e, initialInput, settingName ); - return new ByteSizeValue((long) (doubleValue * unit.toBytes(1))); } catch (final NumberFormatException ignored) { throw new OpenSearchParseException("failed to parse [{}]", e, initialInput); } diff --git a/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java b/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java index 99c1feb78527f..7f6f753209a61 100644 --- a/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java +++ b/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java @@ -336,12 +336,10 @@ public void testParseInvalidNumber() throws IOException { public void testParseFractionalNumber() throws IOException { ByteSizeUnit unit = randomValueOtherThan(ByteSizeUnit.BYTES, () -> randomFrom(ByteSizeUnit.values())); String fractionalValue = "23.5" + unit.getSuffix(); - ByteSizeValue instance = ByteSizeValue.parseBytesSizeValue(fractionalValue, "test"); - assertEquals(fractionalValue, instance.toString()); - assertWarnings( - "Fractional bytes values are deprecated. Use non-fractional bytes values instead: [" - + fractionalValue - + "] found for setting [test]" + // test exception is thrown: fractional byte size values has been deprecated since Legacy 6.2 + OpenSearchParseException e = expectThrows( + OpenSearchParseException.class, + () -> ByteSizeValue.parseBytesSizeValue(fractionalValue, "test") ); } From 6c075aec4dc1636b832fc226d22a1521f90eb1d4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Jul 2023 11:31:31 -0700 Subject: [PATCH 30/75] Bump com.google.cloud:google-cloud-core-http from 2.21.0 to 2.21.1 in /plugins/repository-gcs (#8999) * Bump com.google.cloud:google-cloud-core-http from 2.21.0 to 2.21.1 in /plugins/repository-gcs Dependabot couldn't find the original pull request head commit, 66865c36594308e4b119a5388a78a59c1fc0ba2c. 
* Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 1 + plugins/repository-gcs/build.gradle | 2 +- .../licenses/google-cloud-core-http-2.21.0.jar.sha1 | 1 - .../licenses/google-cloud-core-http-2.21.1.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-2.21.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-2.21.1.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 3967840434902..b656c3b8ebb8e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -94,6 +94,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `hadoop` libraries from 3.3.4 to 3.3.6 ([#6995](https://github.com/opensearch-project/OpenSearch/pull/6995)) - Bump `com.gradle.enterprise` from 3.13.3 to 3.14.1 ([#8996](https://github.com/opensearch-project/OpenSearch/pull/8996)) - Bump `org.apache.commons:commons-lang3` from 3.12.0 to 3.13.0 ([#8995](https://github.com/opensearch-project/OpenSearch/pull/8995)) +- Bump `com.google.cloud:google-cloud-core-http` from 2.21.0 to 2.21.1 ([#8999](https://github.com/opensearch-project/OpenSearch/pull/8999)) ### Changed - Perform aggregation postCollection in ContextIndexSearcher after searching leaves ([#8303](https://github.com/opensearch-project/OpenSearch/pull/8303)) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index fd2f713dd33cd..0fb98c8b9abff 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -67,7 +67,7 @@ dependencies { api "com.google.auth:google-auth-library-oauth2-http:${versions.google_auth}" api 'com.google.cloud:google-cloud-core:2.5.10' - api 'com.google.cloud:google-cloud-core-http:2.21.0' + api 'com.google.cloud:google-cloud-core-http:2.21.1' api 'com.google.cloud:google-cloud-storage:1.113.1' api 'com.google.code.gson:gson:2.9.0' diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.0.jar.sha1 deleted file mode 100644 index 2ef0a9bf9b33e..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -07da4710ccdbcfee253672c0b9e00e7370626c26 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.1.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.1.jar.sha1 new file mode 100644 index 0000000000000..cc5e7a53098ac --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-http-2.21.1.jar.sha1 @@ -0,0 +1 @@ +88dd2b413dd06826c611e39e6e3259e069f02f66 \ No newline at end of file From dea68894fffdf1df406f5f5c0cd4f27ec8f1368f Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Mon, 31 Jul 2023 15:44:57 -0500 Subject: [PATCH 31/75] [Bug] Fix Setting#byteSizeSetting to use whole value string (#9011) This fixes a bug where the Setting#byteSizeSetting static method returned fractional byte values when calling .toString inside the lambda. The change is to directly return the bytes value as a string instead of using ByteSizeValue#toString which tries to be smart and by pretty printing in a more human-friendly format. 
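As an illustration only (not part of this patch), a minimal sketch of the problem described above, assuming the ByteSizeValue API that appears in the diffs of this series; the 1536-byte default is a hypothetical example:

    // A whole-byte default such as 1536 bytes pretty-prints with a fraction ("1.5kb"),
    // which the stricter parser change earlier in this series no longer accepts.
    // Emitting the raw byte count keeps the default round-trippable through parseBytesSizeValue.
    ByteSizeValue value = new ByteSizeValue(1536, ByteSizeUnit.BYTES);
    String pretty = value.toString();                                   // "1.5kb"  -> now rejected when parsed back
    String whole  = value.getBytes() + ByteSizeUnit.BYTES.getSuffix();  // "1536b"  -> always parseable

Returning the whole-byte string is the smallest change that keeps existing byte-size setting defaults valid under the new parsing rules.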
Signed-off-by: Nicholas Walter Knize --- .../src/main/java/org/opensearch/common/settings/Setting.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/common/settings/Setting.java b/server/src/main/java/org/opensearch/common/settings/Setting.java index 3bf2988e88e5a..86d1d4f90ed18 100644 --- a/server/src/main/java/org/opensearch/common/settings/Setting.java +++ b/server/src/main/java/org/opensearch/common/settings/Setting.java @@ -40,6 +40,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.Strings; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.unit.ByteSizeUnit; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; @@ -2047,7 +2048,7 @@ static boolean parseBoolean(String b, String key, boolean isFiltered) { } public static Setting byteSizeSetting(String key, ByteSizeValue value, Property... properties) { - return byteSizeSetting(key, (s) -> value.toString(), properties); + return byteSizeSetting(key, (s) -> value.getBytes() + ByteSizeUnit.BYTES.getSuffix(), properties); } public static Setting byteSizeSetting(String key, Setting fallbackSetting, Property... properties) { From 639d28ad46927f8fbb6c940bc7efd4e935528526 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Mon, 31 Jul 2023 19:11:22 -0500 Subject: [PATCH 32/75] [Refactor] CircuitBreaker foundation classes to core library (#9006) This commit refactors the following CircuitBreaker foundation classes from the server module to the core library: * o.o.common.breaker.CircuitBreaker * o.o.common.breaker.CircuitBreakingException * o.o.common.breaker.NoopCircuitBreaker * o.o.indices.breaker.AllCircuitBreakerStats * o.o.indices.breaker.CircuitBreakerService * o.o.indices.breaker.CircuitBreakerStats * o.o.indices.breaker.NoneCircuitBreakerService The following base support classes are also refactored from the server to appropriate common or core libraries as a side effect: * o.o.common.component.AbstractLifecycleComponent * o.o.common.component.Lifecycle * o.o.common.component.LifecycleComponent * o.o.common.component.LifecycleListener * o.o.common.unit.ByteSizeUnit * o.o.common.unit.ByteSizeValue Tests and documentation are updated. 
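For downstream consumers the change is import-only; class names are unchanged and only the packages move. A hedged, illustrative sketch (not part of this patch) of the before/after imports, using only classes listed in the renames below; the consumer class itself is hypothetical:

    // Old locations (moved by this commit):
    //   org.opensearch.common.breaker.CircuitBreaker
    //   org.opensearch.indices.breaker.CircuitBreakerService / NoneCircuitBreakerService
    import org.opensearch.core.common.breaker.CircuitBreaker;
    import org.opensearch.core.indices.breaker.CircuitBreakerService;
    import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;

    public class BreakerConsumerExample {
        public static void main(String[] args) {
            // Same behavior as before the refactor: a breaker service that never trips,
            // as used by tests and benchmarks throughout the diffs below.
            CircuitBreakerService service = new NoneCircuitBreakerService();
            CircuitBreaker breaker = service.getBreaker(CircuitBreaker.REQUEST);
            System.out.println(breaker.getName());
        }
    }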
Signed-off-by: Nicholas Walter Knize --- .../aggregations/TermsReduceBenchmark.java | 6 ++--- .../remote/filecache/FileCacheBenchmark.java | 4 ++-- .../indices/DataStreamsStatsResponse.java | 2 +- .../client/indices/ResizeRequest.java | 2 +- .../indices/rollover/RolloverRequest.java | 2 +- .../opensearch/client/BulkProcessorIT.java | 4 ++-- .../opensearch/client/ClusterClientIT.java | 2 +- .../java/org/opensearch/client/CrudIT.java | 4 ++-- .../opensearch/client/IndicesClientIT.java | 4 ++-- .../client/IndicesRequestConvertersTests.java | 2 +- .../SnapshotRequestConvertersTests.java | 2 +- .../documentation/CRUDDocumentationIT.java | 4 ++-- .../ClusterClientDocumentationIT.java | 2 +- .../IndicesClientDocumentationIT.java | 4 ++-- .../DataStreamsStatsResponseTests.java | 2 +- .../rollover/RolloverRequestTests.java | 2 +- .../rollover/RolloverResponseTests.java | 2 +- .../AbstractLifecycleComponent.java | 2 +- .../common/lifecycle}/Lifecycle.java | 2 +- .../common/lifecycle}/LifecycleComponent.java | 2 +- .../common/lifecycle}/LifecycleListener.java | 2 +- .../common/lifecycle/package-info.java | 16 +++++++++++++ .../org/opensearch/OpenSearchException.java | 16 +++++++++++++ .../opensearch/OpenSearchParseException.java | 0 .../org/opensearch/core/common/Strings.java | 23 +++++++++++++++++++ .../core}/common/breaker/CircuitBreaker.java | 2 +- .../breaker/CircuitBreakingException.java | 2 +- .../common/breaker/NoopCircuitBreaker.java | 2 +- .../core/common/breaker}/package-info.java | 6 +++-- .../core}/common/unit/ByteSizeUnit.java | 2 +- .../core}/common/unit/ByteSizeValue.java | 4 ++-- .../core/common/unit/package-info.java | 16 +++++++++++++ .../breaker/AllCircuitBreakerStats.java | 2 +- .../breaker/CircuitBreakerService.java | 6 ++--- .../indices/breaker/CircuitBreakerStats.java | 4 ++-- .../breaker/NoneCircuitBreakerService.java | 6 ++--- .../core/indices/breaker/package-info.java | 15 ++++++++++++ .../opensearch/core/indices/package-info.java | 12 ++++++++++ .../core}/common/unit/ByteSizeUnitTests.java | 14 +++++------ .../core}/common/unit/ByteSizeValueTests.java | 2 +- .../stats/InternalMatrixStatsTests.java | 2 +- .../ingest/common/BytesProcessor.java | 2 +- .../ingest/common/BytesProcessorTests.java | 4 ++-- .../painless/api/LimitedCharSequence.java | 4 ++-- .../opensearch/painless/RegexLimitTests.java | 2 +- .../api/LimitedCharSequenceTests.java | 2 +- .../percolator/PercolateQueryBuilder.java | 4 ++-- .../index/rankeval/RankEvalResponseTests.java | 4 ++-- .../AbstractAsyncBulkByScrollAction.java | 2 +- .../RemoteScrollableHitSourceTests.java | 4 ++-- .../url/URLSnapshotRestoreIT.java | 2 +- .../common/blobstore/url/URLBlobStore.java | 4 ++-- .../netty4/Netty4HttpRequestSizeLimitIT.java | 4 ++-- .../opensearch/rest/Netty4BadRequestIT.java | 2 +- .../netty4/Netty4HttpServerTransport.java | 4 ++-- .../transport/CopyBytesSocketChannel.java | 2 +- .../transport/Netty4ModulePlugin.java | 2 +- .../opensearch/transport/NettyAllocator.java | 2 +- .../transport/netty4/Netty4Transport.java | 6 ++--- .../http/netty4/Netty4BadRequestTests.java | 2 +- .../http/netty4/Netty4HttpClient.java | 4 ++-- .../Netty4HttpServerPipeliningTests.java | 2 +- .../Netty4HttpServerTransportTests.java | 4 ++-- .../Netty4SizeHeaderFrameDecoderTests.java | 2 +- .../transport/netty4/Netty4UtilsTests.java | 4 ++-- .../netty4/NettyTransportMultiPortTests.java | 4 ++-- .../netty4/SimpleNetty4TransportTests.java | 2 +- .../management/AzureComputeServiceImpl.java | 2 +- .../discovery/ec2/Ec2DiscoveryTests.java | 2 +- 
.../discovery/ec2/Ec2RetriesTests.java | 2 +- .../cloud/gce/GceMetadataService.java | 2 +- .../azure/AzureBlobStoreRepositoryTests.java | 2 +- .../repositories/azure/AzureRepository.java | 2 +- .../azure/AzureStorageService.java | 4 ++-- .../azure/AzureBlobContainerRetriesTests.java | 2 +- .../azure/AzureRepositorySettingsTests.java | 4 ++-- ...eCloudStorageBlobStoreRepositoryTests.java | 4 ++-- .../gcs/GoogleCloudStorageBlobStore.java | 4 ++-- .../gcs/GoogleCloudStorageRepository.java | 4 ++-- ...CloudStorageBlobContainerRetriesTests.java | 2 +- .../repositories/hdfs/HdfsRepository.java | 2 +- .../s3/S3BlobStoreRepositoryTests.java | 2 +- .../repositories/s3/S3BlobContainer.java | 4 ++-- .../repositories/s3/S3BlobStore.java | 2 +- .../repositories/s3/S3Repository.java | 4 ++-- .../s3/async/AsyncTransferManager.java | 2 +- .../s3/S3BlobContainerMockClientTests.java | 2 +- .../s3/S3BlobContainerRetriesTests.java | 4 ++-- .../s3/S3BlobStoreContainerTests.java | 2 +- .../repositories/s3/S3RepositoryTests.java | 4 ++-- .../s3/async/AsyncTransferManagerTests.java | 2 +- .../http/nio/NioHttpServerTransport.java | 2 +- .../transport/nio/NioTransport.java | 2 +- .../transport/nio/NioTransportPlugin.java | 2 +- .../transport/nio/TcpReadWriteHandler.java | 2 +- .../http/nio/HttpReadWriteHandlerTests.java | 2 +- .../opensearch/http/nio/NioHttpClient.java | 4 ++-- .../http/nio/NioHttpServerTransportTests.java | 4 ++-- .../nio/SimpleNioTransportTests.java | 2 +- .../admin/indices/create/ShrinkIndexIT.java | 2 +- .../admin/indices/rollover/RolloverIT.java | 4 ++-- .../action/bulk/BulkProcessorIT.java | 4 ++-- .../action/search/TransportSearchIT.java | 2 +- .../cluster/SimpleClusterStateIT.java | 2 +- .../decider/DiskThresholdDeciderIT.java | 4 ++-- .../cluster/settings/ClusterSettingsIT.java | 2 +- .../cluster/shards/ClusterShardLimitIT.java | 2 +- .../gateway/ReplicaShardAllocatorIT.java | 4 ++-- .../opensearch/index/shard/IndexShardIT.java | 6 ++--- .../RemoveCorruptedShardDataCommandIT.java | 4 ++-- .../index/store/CorruptedFileIT.java | 4 ++-- .../index/store/CorruptedTranslogIT.java | 4 ++-- .../breaker/CircuitBreakerServiceIT.java | 18 +++++++-------- .../RandomExceptionCircuitBreakerIT.java | 2 +- .../indices/recovery/IndexRecoveryIT.java | 8 +++---- .../indices/state/CloseIndexIT.java | 4 ++-- .../recovery/TruncatedRecoveryIT.java | 4 ++-- .../RemoteStoreBackpressureIT.java | 2 +- .../fs/FsBlobStoreRepositoryIT.java | 4 ++-- .../CardinalityWithRequestBreakerIT.java | 2 +- .../CorruptedBlobStoreRepositoryIT.java | 2 +- .../opensearch/snapshots/RepositoriesIT.java | 2 +- .../snapshots/RestoreSnapshotIT.java | 2 +- .../snapshots/SearchableSnapshotIT.java | 2 +- .../SharedClusterSnapshotRestoreIT.java | 2 +- .../snapshots/SnapshotStatusApisIT.java | 2 +- .../opensearch/OpenSearchServerException.java | 16 ------------- .../org/opensearch/action/ActionModule.java | 2 +- .../admin/cluster/node/info/NodeInfo.java | 2 +- .../admin/cluster/node/stats/NodeStats.java | 2 +- .../snapshots/status/SnapshotStats.java | 2 +- .../cluster/stats/ClusterStatsNodes.java | 2 +- .../create/CreateIndexRequestBuilder.java | 5 ++-- .../datastream/DataStreamsStatsAction.java | 2 +- .../admin/indices/rollover/Condition.java | 2 +- .../indices/rollover/MaxSizeCondition.java | 4 ++-- .../indices/rollover/RolloverRequest.java | 2 +- .../rollover/RolloverRequestBuilder.java | 2 +- .../rollover/TransportRolloverAction.java | 2 +- .../admin/indices/shrink/ResizeRequest.java | 2 +- .../indices/shrink/ResizeRequestBuilder.java | 2 
+- .../indices/shrink/TransportResizeAction.java | 2 +- .../admin/indices/stats/CommonStats.java | 2 +- .../upgrade/get/UpgradeStatusResponse.java | 2 +- .../opensearch/action/bulk/BulkProcessor.java | 4 ++-- .../opensearch/action/index/IndexRequest.java | 2 +- .../search/QueryPhaseResultConsumer.java | 4 ++-- .../action/search/SearchPhaseController.java | 2 +- .../action/search/TransportSearchAction.java | 4 ++-- .../replication/ReplicationOperation.java | 2 +- .../org/opensearch/cluster/ClusterInfo.java | 2 +- .../org/opensearch/cluster/DiskUsage.java | 4 ++-- .../cluster/NodeConnectionsService.java | 2 +- .../cluster/coordination/Coordinator.java | 2 +- .../routing/DelayedAllocationService.java | 2 +- .../allocation/DiskThresholdSettings.java | 8 +++---- .../allocation/NodeAllocationResult.java | 2 +- .../decider/DiskThresholdDecider.java | 4 ++-- .../service/ClusterApplierService.java | 2 +- .../cluster/service/ClusterService.java | 2 +- .../cluster/service/MasterService.java | 2 +- .../opensearch/common/FieldMemoryStats.java | 2 +- .../java/org/opensearch/common/Strings.java | 23 ------------------- .../breaker/ChildMemoryCircuitBreaker.java | 4 +++- .../common/io/DiskIoBufferPool.java | 2 +- .../common/network/NetworkModule.java | 2 +- .../common/network/NetworkService.java | 2 +- .../opensearch/common/settings/Setting.java | 14 +++++------ .../opensearch/common/settings/Settings.java | 6 ++--- .../common/settings/WriteableSetting.java | 2 +- .../common/unit/MemorySizeValue.java | 4 +++- .../org/opensearch/common/unit/SizeValue.java | 2 +- .../org/opensearch/common/util/BigArrays.java | 6 ++--- .../common/util/PageCacheRecycler.java | 2 +- .../concurrent/AbstractLifecycleRunnable.java | 2 +- .../xcontent/XContentOpenSearchExtension.java | 2 +- .../org/opensearch/discovery/Discovery.java | 2 +- .../discovery/SeedHostsResolver.java | 2 +- .../org/opensearch/env/NodeEnvironment.java | 2 +- .../opensearch/gateway/GatewayService.java | 2 +- .../gateway/ReplicaShardAllocator.java | 2 +- .../http/AbstractHttpServerTransport.java | 4 ++-- .../java/org/opensearch/http/HttpInfo.java | 2 +- .../opensearch/http/HttpServerTransport.java | 2 +- .../http/HttpTransportSettings.java | 4 ++-- .../org/opensearch/index/IndexModule.java | 2 +- .../org/opensearch/index/IndexService.java | 2 +- .../org/opensearch/index/IndexSettings.java | 4 ++-- .../opensearch/index/IndexingPressure.java | 2 +- .../opensearch/index/MergePolicyConfig.java | 4 ++-- .../index/SegmentReplicationShardStats.java | 2 +- .../index/cache/query/QueryCacheStats.java | 2 +- .../cache/request/RequestCacheStats.java | 2 +- .../org/opensearch/index/engine/Engine.java | 2 +- .../opensearch/index/engine/EngineConfig.java | 4 ++-- .../index/engine/EngineConfigFactory.java | 2 +- .../index/engine/InternalEngine.java | 2 +- .../index/engine/NRTReplicationEngine.java | 2 +- .../OpenSearchConcurrentMergeScheduler.java | 2 +- .../org/opensearch/index/engine/Segment.java | 2 +- .../index/engine/SegmentsStats.java | 2 +- .../index/fielddata/FieldDataStats.java | 2 +- .../index/fielddata/IndexFieldData.java | 2 +- .../fielddata/IndexFieldDataService.java | 2 +- .../fielddata/RamAccountingTermsEnum.java | 2 +- .../ordinals/GlobalOrdinalsBuilder.java | 4 ++-- .../plain/AbstractGeoShapeIndexFieldData.java | 2 +- .../plain/AbstractIndexOrdinalsFieldData.java | 2 +- .../AbstractLatLonPointIndexFieldData.java | 2 +- .../fielddata/plain/BinaryIndexFieldData.java | 2 +- .../plain/BytesBinaryIndexFieldData.java | 2 +- .../plain/ConstantIndexFieldData.java 
| 2 +- .../plain/PagedBytesIndexFieldData.java | 4 ++-- .../plain/SortedNumericIndexFieldData.java | 2 +- .../SortedSetOrdinalsIndexFieldData.java | 2 +- .../index/mapper/IdFieldMapper.java | 2 +- .../opensearch/index/merge/MergeStats.java | 2 +- .../opensearch/index/shard/IndexShard.java | 4 ++-- .../index/shard/PrimaryReplicaSyncer.java | 4 ++-- .../opensearch/index/shard/StoreRecovery.java | 2 +- .../BlobStoreIndexShardSnapshot.java | 2 +- .../stats/IndexingPressurePerShardStats.java | 2 +- .../index/stats/IndexingPressureStats.java | 2 +- .../opensearch/index/store/StoreStats.java | 2 +- .../store/remote/filecache/FileCache.java | 4 ++-- .../remote/filecache/FileCacheFactory.java | 2 +- .../remote/filecache/FileCacheStats.java | 2 +- .../index/translog/TranslogConfig.java | 4 ++-- .../index/translog/TranslogStats.java | 2 +- .../index/translog/TranslogWriter.java | 2 +- .../indices/IndexingMemoryController.java | 4 ++-- .../opensearch/indices/IndicesQueryCache.java | 2 +- .../indices/IndicesRequestCache.java | 2 +- .../opensearch/indices/IndicesService.java | 8 +++---- .../indices/breaker/BreakerSettings.java | 4 ++-- .../HierarchyCircuitBreakerService.java | 13 +++++++---- .../cluster/IndicesClusterStateService.java | 2 +- .../cache/IndicesFieldDataCache.java | 2 +- .../recovery/PeerRecoverySourceService.java | 2 +- .../recovery/PeerRecoveryTargetService.java | 2 +- .../RecoverFilesRecoveryException.java | 2 +- .../indices/recovery/RecoverySettings.java | 4 ++-- .../recovery/RecoverySourceHandler.java | 2 +- .../recovery/RetryableTransportClient.java | 2 +- .../SegmentReplicationSourceService.java | 2 +- .../common/ReplicationLuceneIndex.java | 2 +- .../opensearch/monitor/MonitorService.java | 2 +- .../monitor/fs/FsHealthService.java | 2 +- .../org/opensearch/monitor/fs/FsInfo.java | 2 +- .../org/opensearch/monitor/fs/FsProbe.java | 2 +- .../monitor/jvm/JvmGcMonitorService.java | 4 ++-- .../org/opensearch/monitor/jvm/JvmInfo.java | 2 +- .../org/opensearch/monitor/jvm/JvmStats.java | 2 +- .../org/opensearch/monitor/os/OsStats.java | 2 +- .../monitor/process/ProcessStats.java | 2 +- .../main/java/org/opensearch/node/Node.java | 14 +++++------ .../java/org/opensearch/node/NodeService.java | 2 +- .../plugins/CircuitBreakerPlugin.java | 4 ++-- .../org/opensearch/plugins/NetworkPlugin.java | 2 +- .../java/org/opensearch/plugins/Plugin.java | 2 +- .../opensearch/plugins/PluginsService.java | 2 +- .../repositories/FilterRepository.java | 4 ++-- .../repositories/RepositoriesService.java | 2 +- .../opensearch/repositories/Repository.java | 2 +- .../blobstore/BlobStoreRepository.java | 6 ++--- .../repositories/fs/FsRepository.java | 2 +- .../org/opensearch/rest/RestController.java | 4 ++-- .../java/org/opensearch/rest/RestRequest.java | 4 ++-- .../rest/action/cat/RestAllocationAction.java | 2 +- .../cat/RestCatSegmentReplicationAction.java | 2 +- .../rest/action/cat/RestFielddataAction.java | 2 +- .../rest/action/cat/RestNodesAction.java | 2 +- .../opensearch/rest/action/cat/RestTable.java | 2 +- .../org/opensearch/script/ScriptCache.java | 4 ++-- .../org/opensearch/search/SearchService.java | 6 ++--- .../search/aggregations/AggregatorBase.java | 6 ++--- .../MultiBucketConsumerService.java | 2 +- .../SearchBackpressureService.java | 2 +- .../trackers/HeapUsageTracker.java | 2 +- .../suggest/completion/CompletionStats.java | 2 +- .../snapshots/SnapshotShardsService.java | 2 +- .../snapshots/SnapshotsService.java | 2 +- .../TaskCancellationMonitoringService.java | 2 +- 
.../org/opensearch/tasks/TaskManager.java | 2 +- .../transport/InboundAggregator.java | 4 ++-- .../opensearch/transport/InboundPipeline.java | 2 +- .../opensearch/transport/TcpTransport.java | 10 ++++---- .../org/opensearch/transport/Transport.java | 2 +- .../transport/TransportKeepAlive.java | 2 +- .../transport/TransportService.java | 2 +- .../transport/TransportSettings.java | 2 +- .../opensearch/transport/TransportStats.java | 2 +- .../ExceptionSerializationTests.java | 8 +++---- .../cluster/node/stats/NodeStatsTests.java | 4 ++-- .../node/tasks/TaskManagerTestCase.java | 2 +- ...TransportClearIndicesCacheActionTests.java | 2 +- .../DataStreamsStatsResponseTests.java | 2 +- .../indices/rollover/ConditionTests.java | 4 ++-- .../rollover/RolloverRequestTests.java | 4 ++-- .../rollover/RolloverResponseTests.java | 2 +- .../TransportRolloverActionTests.java | 4 ++-- .../indices/shrink/ResizeRequestTests.java | 2 +- .../shrink/TransportResizeActionTests.java | 2 +- .../action/bulk/BulkProcessorTests.java | 4 ++-- .../action/index/IndexRequestTests.java | 2 +- ...TransportResyncReplicationActionTests.java | 2 +- .../action/search/DfsQueryPhaseTests.java | 4 ++-- .../action/search/FetchSearchPhaseTests.java | 4 ++-- .../search/QueryPhaseResultConsumerTests.java | 4 ++-- .../search/SearchPhaseControllerTests.java | 6 ++--- .../SearchQueryThenFetchAsyncActionTests.java | 4 ++-- .../BroadcastReplicationTests.java | 4 ++-- .../ReplicationOperationTests.java | 4 ++-- .../replication/ReplicationResponseTests.java | 4 ++-- .../TransportReplicationActionTests.java | 2 +- .../cluster/NodeConnectionsServiceTests.java | 4 ++-- .../cluster/metadata/IndexMetadataTests.java | 2 +- .../DiskThresholdSettingsTests.java | 2 +- .../java/org/opensearch/common/UUIDTests.java | 2 +- .../RecyclingBytesStreamOutputTests.java | 2 +- .../ReleasableBytesStreamOutputTests.java | 2 +- .../common/network/NetworkModuleTests.java | 4 ++-- .../settings/MemorySizeSettingsTests.java | 2 +- .../common/settings/SettingTests.java | 4 ++-- .../common/settings/SettingsTests.java | 4 ++-- .../settings/WriteableSettingTests.java | 4 ++-- .../common/util/BigArraysTests.java | 8 +++---- .../common/util/BinarySearcherTests.java | 2 +- .../opensearch/common/util/BitArrayTests.java | 12 +++++----- .../common/util/BytesRefHashTests.java | 2 +- .../opensearch/common/util/LongHashTests.java | 2 +- .../common/util/LongLongHashTests.java | 2 +- .../common/util/LongObjectHashMapTests.java | 2 +- .../AbstractLifecycleRunnableTests.java | 2 +- .../FileBasedSeedHostsProviderTests.java | 2 +- .../discovery/SeedHostsResolverTests.java | 2 +- .../env/NodeRepurposeCommandTests.java | 4 ++-- .../extensions/ExtensionsManagerTests.java | 2 +- ...ExtensionTransportActionsHandlerTests.java | 2 +- .../RestInitializeExtensionActionTests.java | 2 +- .../rest/RestSendToExtensionActionTests.java | 2 +- .../settings/RegisterCustomSettingsTests.java | 4 ++-- .../GatewayMetaStatePersistedStateTests.java | 2 +- .../PersistedClusterStateServiceTests.java | 2 +- .../AbstractHttpServerTransportTests.java | 2 +- .../http/DefaultRestChannelTests.java | 2 +- .../opensearch/index/IndexModuleTests.java | 6 ++--- .../opensearch/index/IndexSettingsTests.java | 2 +- .../index/MergePolicySettingsTests.java | 4 ++-- .../index/engine/InternalEngineTests.java | 4 ++-- .../index/engine/NoOpEngineTests.java | 2 +- .../index/fielddata/FieldDataCacheTests.java | 2 +- .../index/shard/IndexShardTests.java | 2 +- .../shard/PrimaryReplicaSyncerTests.java | 2 +- 
.../index/shard/RefreshListenersTests.java | 2 +- .../snapshots/blobstore/FileInfoTests.java | 2 +- .../OnDemandBlockSnapshotIndexInputTests.java | 2 +- .../filecache/FileCacheCleanerTests.java | 4 ++-- .../remote/filecache/FileCacheTests.java | 6 ++--- .../remote/utils/TransferManagerTests.java | 4 ++-- .../index/translog/LocalTranslogTests.java | 4 ++-- .../index/translog/RemoteFSTranslogTests.java | 4 ++-- .../IndexingMemoryControllerTests.java | 4 ++-- .../indices/IndicesRequestCacheTests.java | 2 +- .../indices/breaker/BreakerSettingsTests.java | 2 +- .../HierarchyCircuitBreakerServiceTests.java | 9 ++++---- .../breaker/CircuitBreakerUnitTests.java | 2 +- .../opensearch/monitor/fs/FsProbeTests.java | 8 +++---- .../monitor/jvm/JvmGcMonitorServiceTests.java | 2 +- .../opensearch/monitor/jvm/JvmStatsTests.java | 2 +- .../java/org/opensearch/node/NodeTests.java | 8 +++---- .../nodesinfo/NodeInfoStreamingTests.java | 2 +- .../persistent/TestPersistentTasksPlugin.java | 2 +- .../RepositoriesServiceTests.java | 4 ++-- .../blobstore/BlobStoreRepositoryTests.java | 2 +- .../repositories/fs/FsRepositoryTests.java | 2 +- .../opensearch/rest/RestControllerTests.java | 6 ++--- .../rest/RestHttpResponseHeadersTests.java | 2 +- .../indices/RestValidateQueryActionTests.java | 2 +- .../RestCatSegmentReplicationActionTests.java | 2 +- .../opensearch/script/ScriptCacheTests.java | 2 +- .../opensearch/script/ScriptServiceTests.java | 2 +- .../search/DefaultSearchContextTests.java | 2 +- .../aggregations/AggregatorBaseTests.java | 4 ++-- .../bucket/BucketsAggregatorTests.java | 4 ++-- .../InternalVariableWidthHistogramTests.java | 4 ++-- .../BestDocsDeferringCollectorTests.java | 2 +- .../terms/BytesKeyedBucketOrdsTests.java | 2 +- .../terms/LongKeyedBucketOrdsTests.java | 2 +- .../terms/MultiTermsAggregatorTests.java | 2 +- .../bucket/terms/TermsAggregatorTests.java | 4 ++-- .../HyperLogLogPlusPlusSparseTests.java | 8 +++---- .../metrics/HyperLogLogPlusPlusTests.java | 8 +++---- .../metrics/InternalCardinalityTests.java | 2 +- .../metrics/MinAggregatorTests.java | 2 +- .../ScriptedMetricAggregatorTests.java | 8 +++---- .../pipeline/SearchPipelineServiceTests.java | 4 ++-- .../search/sort/BucketedSortTestCase.java | 2 +- .../snapshots/SnapshotResiliencyTests.java | 2 +- .../org/opensearch/test/NoopDiscovery.java | 4 ++-- .../transport/InboundAggregatorTests.java | 2 +- .../transport/InboundPipelineTests.java | 6 ++--- .../transport/OutboundHandlerTests.java | 4 ++-- .../transport/TcpTransportTests.java | 4 ++-- .../TransportServiceHandshakeTests.java | 2 +- .../AbstractCoordinatorTestCase.java | 2 +- .../common/breaker/TestCircuitBreaker.java | 3 +++ .../bytes/AbstractBytesReferenceTestCase.java | 4 ++-- .../opensearch/common/util/MockBigArrays.java | 4 ++-- .../index/engine/EngineTestCase.java | 4 ++-- .../index/mapper/MapperTestCase.java | 2 +- .../index/shard/IndexShardTestCase.java | 4 ++-- .../index/shard/RestoreOnlyRepository.java | 2 +- .../java/org/opensearch/node/MockNode.java | 2 +- .../node/RecoverySettingsChunkSizePlugin.java | 2 +- .../AbstractBlobContainerRetriesTestCase.java | 2 +- .../opensearch/search/MockSearchService.java | 2 +- .../aggregations/AggregatorTestCase.java | 6 ++--- .../AbstractSnapshotIntegTestCase.java | 2 +- .../test/AbstractBuilderTestCase.java | 2 +- .../opensearch/test/ExternalTestCluster.java | 2 +- .../test/InternalAggregationTestCase.java | 4 ++-- .../opensearch/test/InternalTestCluster.java | 10 ++++---- .../opensearch/test/MockHttpTransport.java | 2 +- 
.../test/OpenSearchIntegTestCase.java | 4 ++-- .../test/rest/RestActionTestCase.java | 2 +- .../test/transport/FakeTransport.java | 2 +- .../test/transport/MockTransportService.java | 2 +- .../test/transport/StubbableTransport.java | 4 ++-- .../transport/nio/MockNioTransport.java | 4 ++-- .../transport/nio/MockNioTransportPlugin.java | 2 +- .../nio/SimpleMockNioTransportTests.java | 2 +- 432 files changed, 755 insertions(+), 682 deletions(-) rename {server/src/main/java/org/opensearch/common/component => libs/common/src/main/java/org/opensearch/common/lifecycle}/AbstractLifecycleComponent.java (98%) rename {server/src/main/java/org/opensearch/common/component => libs/common/src/main/java/org/opensearch/common/lifecycle}/Lifecycle.java (99%) rename {server/src/main/java/org/opensearch/common/component => libs/common/src/main/java/org/opensearch/common/lifecycle}/LifecycleComponent.java (97%) rename {server/src/main/java/org/opensearch/common/component => libs/common/src/main/java/org/opensearch/common/lifecycle}/LifecycleListener.java (97%) create mode 100644 libs/common/src/main/java/org/opensearch/common/lifecycle/package-info.java rename {server => libs/core}/src/main/java/org/opensearch/OpenSearchParseException.java (100%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/common/breaker/CircuitBreaker.java (99%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/common/breaker/CircuitBreakingException.java (98%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/common/breaker/NoopCircuitBreaker.java (98%) rename {server/src/main/java/org/opensearch/common/component => libs/core/src/main/java/org/opensearch/core/common/breaker}/package-info.java (67%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/common/unit/ByteSizeUnit.java (99%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/common/unit/ByteSizeValue.java (99%) create mode 100644 libs/core/src/main/java/org/opensearch/core/common/unit/package-info.java rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/indices/breaker/AllCircuitBreakerStats.java (98%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/indices/breaker/CircuitBreakerService.java (92%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/indices/breaker/CircuitBreakerStats.java (97%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/indices/breaker/NoneCircuitBreakerService.java (91%) create mode 100644 libs/core/src/main/java/org/opensearch/core/indices/breaker/package-info.java create mode 100644 libs/core/src/main/java/org/opensearch/core/indices/package-info.java rename {server/src/test/java/org/opensearch => libs/core/src/test/java/org/opensearch/core}/common/unit/ByteSizeUnitTests.java (91%) rename {server/src/test/java/org/opensearch => libs/core/src/test/java/org/opensearch/core}/common/unit/ByteSizeValueTests.java (99%) diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/search/aggregations/TermsReduceBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/search/aggregations/TermsReduceBenchmark.java index 76851881730a3..b18ea4327cbc2 100644 --- a/benchmarks/src/main/java/org/opensearch/benchmark/search/aggregations/TermsReduceBenchmark.java +++ 
b/benchmarks/src/main/java/org/opensearch/benchmark/search/aggregations/TermsReduceBenchmark.java @@ -40,14 +40,14 @@ import org.opensearch.action.search.SearchPhaseController; import org.opensearch.action.search.SearchProgressListener; import org.opensearch.action.search.SearchRequest; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.settings.Settings; import org.opensearch.core.index.Index; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.SearchModule; import org.opensearch.search.SearchShardTarget; diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/store/remote/filecache/FileCacheBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/store/remote/filecache/FileCacheBenchmark.java index d3bfc9348cdb3..4f6b431cc56fd 100644 --- a/benchmarks/src/main/java/org/opensearch/benchmark/store/remote/filecache/FileCacheBenchmark.java +++ b/benchmarks/src/main/java/org/opensearch/benchmark/store/remote/filecache/FileCacheBenchmark.java @@ -27,8 +27,8 @@ import org.openjdk.jmh.annotations.Threads; import org.openjdk.jmh.annotations.Warmup; import org.openjdk.jmh.infra.Blackhole; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.index.store.remote.filecache.CachedIndexInput; import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.index.store.remote.filecache.FileCacheFactory; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/DataStreamsStatsResponse.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/DataStreamsStatsResponse.java index 2c90d5a734aa0..327836160cceb 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/DataStreamsStatsResponse.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/DataStreamsStatsResponse.java @@ -34,7 +34,7 @@ import org.opensearch.client.core.BroadcastResponse; import org.opensearch.core.ParseField; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ConstructingObjectParser; import org.opensearch.core.xcontent.ObjectParser; import org.opensearch.core.xcontent.XContentParser; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java index 61799a83e5df5..2ffb5c605c27f 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java @@ -39,7 +39,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import 
org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import java.io.IOException; import java.util.Collections; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/rollover/RolloverRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/rollover/RolloverRequest.java index 443cef45e646b..327fccfb35951 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/rollover/RolloverRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/rollover/RolloverRequest.java @@ -37,7 +37,7 @@ import org.opensearch.action.admin.indices.rollover.MaxSizeCondition; import org.opensearch.client.TimedRequest; import org.opensearch.client.indices.CreateIndexRequest; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java index 49acbe8cd2bc1..dd793fdbb7ff6 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java @@ -44,8 +44,8 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java index 2ac0eee407b95..3c7d988e3f01d 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java @@ -61,7 +61,7 @@ import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.support.XContentMapValues; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java index eaf8f4f8efff7..cefe992b58c64 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/CrudIT.java @@ -60,8 +60,8 @@ import org.opensearch.client.indices.GetIndexRequest; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import 
org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java index 8ca5c5fa58742..ea57f1857fbfb 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java @@ -112,8 +112,8 @@ import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java index e21619ff15ef6..c672ed6be110d 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java @@ -79,7 +79,7 @@ import org.opensearch.core.common.Strings; import org.opensearch.test.OpenSearchTestCase; import org.junit.Assert; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import java.io.IOException; import java.util.Arrays; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java index e86de6ba718f9..af178ad2a5d47 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java @@ -48,7 +48,7 @@ import org.opensearch.action.support.master.AcknowledgedRequest; import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.repositories.fs.FsRepository; import org.opensearch.test.OpenSearchTestCase; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java index a973753aa2032..178d9296bd242 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java @@ -77,8 +77,8 @@ import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/ClusterClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/ClusterClientDocumentationIT.java index f85fcae7af365..ccbb64b13b2d2 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/ClusterClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/ClusterClientDocumentationIT.java @@ -63,7 +63,7 @@ import org.opensearch.common.Priority; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.indices.recovery.RecoverySettings; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java index 73346bc57646e..de2eb91c6660b 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/IndicesClientDocumentationIT.java @@ -104,8 +104,8 @@ import org.opensearch.cluster.metadata.Template; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/DataStreamsStatsResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/DataStreamsStatsResponseTests.java index 96db2fb43c161..ef8655b0226dd 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/DataStreamsStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/DataStreamsStatsResponseTests.java @@ -36,7 +36,7 @@ import org.opensearch.action.admin.indices.datastream.DataStreamsStatsAction; import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.client.AbstractResponseTestCase; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverRequestTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverRequestTests.java index 9d51c09ba2e44..c8372d5001fac 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverRequestTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverRequestTests.java @@ -36,7 +36,7 @@ import org.opensearch.action.admin.indices.rollover.MaxAgeCondition; import 
org.opensearch.action.admin.indices.rollover.MaxDocsCondition; import org.opensearch.action.admin.indices.rollover.MaxSizeCondition; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java index ff2418c5caee2..0fb00f6510147 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java @@ -36,7 +36,7 @@ import org.opensearch.action.admin.indices.rollover.MaxAgeCondition; import org.opensearch.action.admin.indices.rollover.MaxDocsCondition; import org.opensearch.action.admin.indices.rollover.MaxSizeCondition; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/main/java/org/opensearch/common/component/AbstractLifecycleComponent.java b/libs/common/src/main/java/org/opensearch/common/lifecycle/AbstractLifecycleComponent.java similarity index 98% rename from server/src/main/java/org/opensearch/common/component/AbstractLifecycleComponent.java rename to libs/common/src/main/java/org/opensearch/common/lifecycle/AbstractLifecycleComponent.java index 837f8af44bf13..111556fbe43cf 100644 --- a/server/src/main/java/org/opensearch/common/component/AbstractLifecycleComponent.java +++ b/libs/common/src/main/java/org/opensearch/common/lifecycle/AbstractLifecycleComponent.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.common.component; +package org.opensearch.common.lifecycle; import java.io.IOException; import java.io.UncheckedIOException; diff --git a/server/src/main/java/org/opensearch/common/component/Lifecycle.java b/libs/common/src/main/java/org/opensearch/common/lifecycle/Lifecycle.java similarity index 99% rename from server/src/main/java/org/opensearch/common/component/Lifecycle.java rename to libs/common/src/main/java/org/opensearch/common/lifecycle/Lifecycle.java index fb12c1fc9ac4b..e76d49cbf49e8 100644 --- a/server/src/main/java/org/opensearch/common/component/Lifecycle.java +++ b/libs/common/src/main/java/org/opensearch/common/lifecycle/Lifecycle.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.common.component; +package org.opensearch.common.lifecycle; /** * Lifecycle state. Allows the following transitions: diff --git a/server/src/main/java/org/opensearch/common/component/LifecycleComponent.java b/libs/common/src/main/java/org/opensearch/common/lifecycle/LifecycleComponent.java similarity index 97% rename from server/src/main/java/org/opensearch/common/component/LifecycleComponent.java rename to libs/common/src/main/java/org/opensearch/common/lifecycle/LifecycleComponent.java index 984d55df1bdfa..f343f9ada01ef 100644 --- a/server/src/main/java/org/opensearch/common/component/LifecycleComponent.java +++ b/libs/common/src/main/java/org/opensearch/common/lifecycle/LifecycleComponent.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.opensearch.common.component; +package org.opensearch.common.lifecycle; import org.opensearch.common.lease.Releasable; diff --git a/server/src/main/java/org/opensearch/common/component/LifecycleListener.java b/libs/common/src/main/java/org/opensearch/common/lifecycle/LifecycleListener.java similarity index 97% rename from server/src/main/java/org/opensearch/common/component/LifecycleListener.java rename to libs/common/src/main/java/org/opensearch/common/lifecycle/LifecycleListener.java index 89c344b955bc9..7ac41a5eb0df0 100644 --- a/server/src/main/java/org/opensearch/common/component/LifecycleListener.java +++ b/libs/common/src/main/java/org/opensearch/common/lifecycle/LifecycleListener.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.common.component; +package org.opensearch.common.lifecycle; /** * Base lifecycle listener. diff --git a/libs/common/src/main/java/org/opensearch/common/lifecycle/package-info.java b/libs/common/src/main/java/org/opensearch/common/lifecycle/package-info.java new file mode 100644 index 0000000000000..1bedde5585e36 --- /dev/null +++ b/libs/common/src/main/java/org/opensearch/common/lifecycle/package-info.java @@ -0,0 +1,16 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Foundation implementation for a object lifecycle. + * + * See {@link org.opensearch.common.lifecycle.Lifecycle} for example usage + * + * @opensearch.internal + */ +package org.opensearch.common.lifecycle; diff --git a/libs/core/src/main/java/org/opensearch/OpenSearchException.java b/libs/core/src/main/java/org/opensearch/OpenSearchException.java index f75a225af1b4d..1a1273ad8963c 100644 --- a/libs/core/src/main/java/org/opensearch/OpenSearchException.java +++ b/libs/core/src/main/java/org/opensearch/OpenSearchException.java @@ -118,6 +118,14 @@ public class OpenSearchException extends RuntimeException implements Writeable, UNKNOWN_VERSION_ADDED ) ); + registerExceptionHandle( + new OpenSearchExceptionHandle( + org.opensearch.OpenSearchParseException.class, + org.opensearch.OpenSearchParseException::new, + 35, + UNKNOWN_VERSION_ADDED + ) + ); registerExceptionHandle( new OpenSearchExceptionHandle( org.opensearch.core.common.ParsingException.class, @@ -134,6 +142,14 @@ public class OpenSearchException extends RuntimeException implements Writeable, UNKNOWN_VERSION_ADDED ) ); + registerExceptionHandle( + new OpenSearchExceptionHandle( + org.opensearch.core.common.breaker.CircuitBreakingException.class, + org.opensearch.core.common.breaker.CircuitBreakingException::new, + 133, + UNKNOWN_VERSION_ADDED + ) + ); } /** diff --git a/server/src/main/java/org/opensearch/OpenSearchParseException.java b/libs/core/src/main/java/org/opensearch/OpenSearchParseException.java similarity index 100% rename from server/src/main/java/org/opensearch/OpenSearchParseException.java rename to libs/core/src/main/java/org/opensearch/OpenSearchParseException.java diff --git a/libs/core/src/main/java/org/opensearch/core/common/Strings.java b/libs/core/src/main/java/org/opensearch/core/common/Strings.java index d6c484e5e4746..ffb7711f84492 100644 --- a/libs/core/src/main/java/org/opensearch/core/common/Strings.java +++ b/libs/core/src/main/java/org/opensearch/core/common/Strings.java @@ -515,4 +515,27 @@ private static String changeFirstCharacterCase(String str, boolean capitalize) { 
sb.append(str.substring(1)); return sb.toString(); } + + /** + * Format the double value with a single decimal points, trimming trailing '.0'. + */ + public static String format1Decimals(double value, String suffix) { + String p = String.valueOf(value); + int ix = p.indexOf('.') + 1; + int ex = p.indexOf('E'); + char fraction = p.charAt(ix); + if (fraction == '0') { + if (ex != -1) { + return p.substring(0, ix - 1) + p.substring(ex) + suffix; + } else { + return p.substring(0, ix - 1) + suffix; + } + } else { + if (ex != -1) { + return p.substring(0, ix) + fraction + p.substring(ex) + suffix; + } else { + return p.substring(0, ix) + fraction + suffix; + } + } + } } diff --git a/server/src/main/java/org/opensearch/common/breaker/CircuitBreaker.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java similarity index 99% rename from server/src/main/java/org/opensearch/common/breaker/CircuitBreaker.java rename to libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java index 4cbd375e8c1ff..0f75f763d21c1 100644 --- a/server/src/main/java/org/opensearch/common/breaker/CircuitBreaker.java +++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreaker.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.common.breaker; +package org.opensearch.core.common.breaker; import java.util.Locale; diff --git a/server/src/main/java/org/opensearch/common/breaker/CircuitBreakingException.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java similarity index 98% rename from server/src/main/java/org/opensearch/common/breaker/CircuitBreakingException.java rename to libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java index 4cab014912970..4869127ee473d 100644 --- a/server/src/main/java/org/opensearch/common/breaker/CircuitBreakingException.java +++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/CircuitBreakingException.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.opensearch.common.breaker; +package org.opensearch.core.common.breaker; import org.opensearch.OpenSearchException; import org.opensearch.core.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/common/breaker/NoopCircuitBreaker.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java similarity index 98% rename from server/src/main/java/org/opensearch/common/breaker/NoopCircuitBreaker.java rename to libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java index ddd72280faa4f..86a0a7ccb96fd 100644 --- a/server/src/main/java/org/opensearch/common/breaker/NoopCircuitBreaker.java +++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/NoopCircuitBreaker.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.opensearch.common.breaker; +package org.opensearch.core.common.breaker; /** * A CircuitBreaker that doesn't increment or adjust, and all operations are diff --git a/server/src/main/java/org/opensearch/common/component/package-info.java b/libs/core/src/main/java/org/opensearch/core/common/breaker/package-info.java similarity index 67% rename from server/src/main/java/org/opensearch/common/component/package-info.java rename to libs/core/src/main/java/org/opensearch/core/common/breaker/package-info.java index 34d034b5a3ffb..f9fb83d2207e1 100644 --- a/server/src/main/java/org/opensearch/common/component/package-info.java +++ b/libs/core/src/main/java/org/opensearch/core/common/breaker/package-info.java @@ -6,5 +6,7 @@ * compatible open source license. */ -/** Base Lifecycle Component package. */ -package org.opensearch.common.component; +/** + * Foundation classes for the Circuit Breaker + */ +package org.opensearch.core.common.breaker; diff --git a/server/src/main/java/org/opensearch/common/unit/ByteSizeUnit.java b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java similarity index 99% rename from server/src/main/java/org/opensearch/common/unit/ByteSizeUnit.java rename to libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java index b95e39feb8fac..68486dd7c975f 100644 --- a/server/src/main/java/org/opensearch/common/unit/ByteSizeUnit.java +++ b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeUnit.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.common.unit; +package org.opensearch.core.common.unit; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeValue.java similarity index 99% rename from server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java rename to libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeValue.java index 7343915a52c0c..529501226f5e3 100644 --- a/server/src/main/java/org/opensearch/common/unit/ByteSizeValue.java +++ b/libs/core/src/main/java/org/opensearch/core/common/unit/ByteSizeValue.java @@ -30,10 +30,10 @@ * GitHub history for details. */ -package org.opensearch.common.unit; +package org.opensearch.core.common.unit; import org.opensearch.OpenSearchParseException; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/libs/core/src/main/java/org/opensearch/core/common/unit/package-info.java b/libs/core/src/main/java/org/opensearch/core/common/unit/package-info.java new file mode 100644 index 0000000000000..79b5dcdcba3b6 --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/core/common/unit/package-info.java @@ -0,0 +1,16 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Common units of measurement used by the core library. These units of measurement classes exist + * in the core because they depend on core functionality beyond the common library (e.g., serializable). 
+ * + * @opensearch.api + * @opensearch.experimental + */ +package org.opensearch.core.common.unit; diff --git a/server/src/main/java/org/opensearch/indices/breaker/AllCircuitBreakerStats.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java similarity index 98% rename from server/src/main/java/org/opensearch/indices/breaker/AllCircuitBreakerStats.java rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java index 83f3f9532948f..ab887acb85a87 100644 --- a/server/src/main/java/org/opensearch/indices/breaker/AllCircuitBreakerStats.java +++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/AllCircuitBreakerStats.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.indices.breaker; +package org.opensearch.core.indices.breaker; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerService.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java similarity index 92% rename from server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerService.java rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java index b5cc1a6b1c6c5..ee9c94f432a36 100644 --- a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerService.java +++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerService.java @@ -30,12 +30,12 @@ * GitHub history for details. */ -package org.opensearch.indices.breaker; +package org.opensearch.core.indices.breaker; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; +import org.opensearch.core.common.breaker.CircuitBreaker; /** * Interface for Circuit Breaker services, which provide breakers to classes diff --git a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerStats.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java similarity index 97% rename from server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerStats.java rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java index 94e63acd10648..0e53a38908a96 100644 --- a/server/src/main/java/org/opensearch/indices/breaker/CircuitBreakerStats.java +++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/CircuitBreakerStats.java @@ -30,12 +30,12 @@ * GitHub history for details. 
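As the CircuitBreakerService hunk above shows, the lifecycle base class now comes from org.opensearch.common.lifecycle instead of the removed org.opensearch.common.component package; the same import rename appears later in the discovery-azure-classic and discovery-gce plugins. A minimal sketch of a component written against the new package, using a hypothetical class name and assuming the abstract doStart/doStop/doClose contract is otherwise unchanged:

    import org.opensearch.common.lifecycle.AbstractLifecycleComponent; // was org.opensearch.common.component

    import java.io.IOException;

    // Hypothetical component, shown only to illustrate the relocated base class.
    public class ExampleLifecycleComponent extends AbstractLifecycleComponent {

        @Override
        protected void doStart() {
            // acquire resources when the node starts this component
        }

        @Override
        protected void doStop() {
            // release resources when the component is stopped
        }

        @Override
        protected void doClose() throws IOException {
            // final cleanup when the component is closed
        }
    }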
*/ -package org.opensearch.indices.breaker; +package org.opensearch.core.indices.breaker; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/indices/breaker/NoneCircuitBreakerService.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java similarity index 91% rename from server/src/main/java/org/opensearch/indices/breaker/NoneCircuitBreakerService.java rename to libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java index bcb47b48a5f14..4095fd32b6d3c 100644 --- a/server/src/main/java/org/opensearch/indices/breaker/NoneCircuitBreakerService.java +++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/NoneCircuitBreakerService.java @@ -30,10 +30,10 @@ * GitHub history for details. */ -package org.opensearch.indices.breaker; +package org.opensearch.core.indices.breaker; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; /** * Class that returns a breaker that never breaks diff --git a/libs/core/src/main/java/org/opensearch/core/indices/breaker/package-info.java b/libs/core/src/main/java/org/opensearch/core/indices/breaker/package-info.java new file mode 100644 index 0000000000000..a98f9ab1d9f1e --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/core/indices/breaker/package-info.java @@ -0,0 +1,15 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Top Level core circuit breaker implementation + * + * @opensearch.internal + * @opensearch.experimental + */ +package org.opensearch.core.indices.breaker; diff --git a/libs/core/src/main/java/org/opensearch/core/indices/package-info.java b/libs/core/src/main/java/org/opensearch/core/indices/package-info.java new file mode 100644 index 0000000000000..c80edf3d2f01a --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/core/indices/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Top Level Package for implementations used across indexes + */ +package org.opensearch.core.indices; diff --git a/server/src/test/java/org/opensearch/common/unit/ByteSizeUnitTests.java b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeUnitTests.java similarity index 91% rename from server/src/test/java/org/opensearch/common/unit/ByteSizeUnitTests.java rename to libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeUnitTests.java index c6bcceec99fbd..07b9131602ac3 100644 --- a/server/src/test/java/org/opensearch/common/unit/ByteSizeUnitTests.java +++ b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeUnitTests.java @@ -30,7 +30,7 @@ * GitHub history for details. 
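With the breaker abstractions moved to org.opensearch.core.common.breaker and the breaker services to org.opensearch.core.indices.breaker, callers only need new imports. A small sketch, assuming the public API is unchanged apart from the package move, of how test code such as the renamed tests in this patch obtains a breaker that never trips:

    import org.opensearch.core.common.breaker.CircuitBreaker;           // was org.opensearch.common.breaker
    import org.opensearch.core.indices.breaker.CircuitBreakerService;   // was org.opensearch.indices.breaker
    import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;

    public class NoopBreakerExample {
        public static void main(String[] args) {
            CircuitBreakerService service = new NoneCircuitBreakerService();
            CircuitBreaker breaker = service.getBreaker(CircuitBreaker.REQUEST);
            // The no-op breaker accounts for nothing and never throws CircuitBreakingException.
            breaker.addEstimateBytesAndMaybeBreak(1024, "example-label");
            System.out.println(breaker.getUsed()); // 0 for the no-op implementation
        }
    }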
*/ -package org.opensearch.common.unit; +package org.opensearch.core.common.unit; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; @@ -38,12 +38,12 @@ import java.io.IOException; -import static org.opensearch.common.unit.ByteSizeUnit.BYTES; -import static org.opensearch.common.unit.ByteSizeUnit.GB; -import static org.opensearch.common.unit.ByteSizeUnit.KB; -import static org.opensearch.common.unit.ByteSizeUnit.MB; -import static org.opensearch.common.unit.ByteSizeUnit.PB; -import static org.opensearch.common.unit.ByteSizeUnit.TB; +import static org.opensearch.core.common.unit.ByteSizeUnit.BYTES; +import static org.opensearch.core.common.unit.ByteSizeUnit.GB; +import static org.opensearch.core.common.unit.ByteSizeUnit.KB; +import static org.opensearch.core.common.unit.ByteSizeUnit.MB; +import static org.opensearch.core.common.unit.ByteSizeUnit.PB; +import static org.opensearch.core.common.unit.ByteSizeUnit.TB; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeValueTests.java similarity index 99% rename from server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java rename to libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeValueTests.java index 7f6f753209a61..def1694a72ba4 100644 --- a/server/src/test/java/org/opensearch/common/unit/ByteSizeValueTests.java +++ b/libs/core/src/test/java/org/opensearch/core/common/unit/ByteSizeValueTests.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.common.unit; +package org.opensearch.core.common.unit; import org.opensearch.OpenSearchParseException; import org.opensearch.core.common.io.stream.Writeable.Reader; diff --git a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java index c5dc68ff4c800..0384615c2f2f5 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java +++ b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/InternalMatrixStatsTests.java @@ -37,7 +37,7 @@ import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.core.xcontent.ContextParser; import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.plugins.SearchPlugin; import org.opensearch.script.ScriptService; import org.opensearch.search.aggregations.Aggregation; diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java index 3bd1137975800..b76fe41c8e67d 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/BytesProcessor.java @@ -32,7 +32,7 @@ package org.opensearch.ingest.common; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import java.util.Map; diff --git 
a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java index 385d77418ee7b..ce8c182b60a61 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/BytesProcessorTests.java @@ -34,8 +34,8 @@ import org.opensearch.OpenSearchException; import org.opensearch.OpenSearchParseException; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.ingest.IngestDocument; import org.opensearch.ingest.Processor; import org.opensearch.ingest.RandomDocumentPicks; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java b/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java index 8084420295280..c8a28158ad8db 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/api/LimitedCharSequence.java @@ -32,8 +32,8 @@ package org.opensearch.painless.api; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.painless.CompilerSettings; import java.util.regex.Pattern; diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java index c3233bc0d924a..26bebfdee2fd0 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/RegexLimitTests.java @@ -34,7 +34,7 @@ import org.junit.AfterClass; import org.junit.BeforeClass; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.settings.Settings; import java.util.Collections; diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java index a7787f4bc3c29..4117eb331197f 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/api/LimitedCharSequenceTests.java @@ -32,7 +32,7 @@ package org.opensearch.painless.api; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.test.OpenSearchTestCase; import java.util.regex.Pattern; diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index 3500086564719..08d9a4855c473 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -92,8 +92,8 @@ import org.opensearch.index.query.QueryShardContext; 
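ByteSizeUnit and ByteSizeValue now live under org.opensearch.core.common.unit, so consumers such as the ingest-common BytesProcessor above change only their imports. A brief usage sketch against the relocated classes, assuming their behaviour is otherwise unchanged:

    import org.opensearch.core.common.unit.ByteSizeUnit;  // was org.opensearch.common.unit.ByteSizeUnit
    import org.opensearch.core.common.unit.ByteSizeValue; // was org.opensearch.common.unit.ByteSizeValue

    public class ByteSizeExample {
        public static void main(String[] args) {
            ByteSizeValue oneGb = new ByteSizeValue(1, ByteSizeUnit.GB);
            System.out.println(oneGb.getBytes()); // 1073741824
            System.out.println(oneGb);            // 1gb

            // Parsing lives on the same class; the setting name is only used in error messages.
            ByteSizeValue parsed = ByteSizeValue.parseBytesSizeValue("512kb", "example.setting");
            System.out.println(parsed.getKb());   // 512
        }
    }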
import org.opensearch.index.query.QueryShardException; import org.opensearch.index.query.Rewriteable; -import org.opensearch.indices.breaker.CircuitBreakerService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import java.io.ByteArrayInputStream; import java.io.IOException; diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java index d027026a6b317..b021d739cc6a6 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalResponseTests.java @@ -39,8 +39,8 @@ import org.opensearch.cluster.block.ClusterBlockException; import org.opensearch.cluster.coordination.NoClusterManagerBlockService; import org.opensearch.core.common.ParsingException; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 6170c1adabbea..a0ad02899ea27 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -53,7 +53,7 @@ import org.opensearch.action.support.TransportAction; import org.opensearch.client.ParentTaskAssigningClient; import org.opensearch.common.Nullable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.index.VersionType; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index ebbd2da776ace..e671fec1fedee 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -44,8 +44,8 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.util.FileSystemUtils; import org.opensearch.common.io.Streams; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; diff --git a/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java 
b/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java index 1bf461d67862b..9c61bca316a56 100644 --- a/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java +++ b/modules/repository-url/src/internalClusterTest/java/org/opensearch/repositories/url/URLSnapshotRestoreIT.java @@ -38,7 +38,7 @@ import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.plugin.repository.url.URLRepositoryModulePlugin; import org.opensearch.plugins.Plugin; import org.opensearch.repositories.fs.FsRepository; diff --git a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java index fbfbf5e006fee..0fad0cbe21033 100644 --- a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java +++ b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobStore.java @@ -37,8 +37,8 @@ import org.opensearch.common.blobstore.BlobStore; import org.opensearch.common.blobstore.BlobStoreException; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import java.net.MalformedURLException; import java.net.URL; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java index db76c0b145840..95440e28686e7 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java @@ -39,8 +39,8 @@ import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.http.HttpServerTransport; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java index 2584b768707cd..f7e1c6106cf5a 100644 --- a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java +++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4BadRequestIT.java @@ -38,7 +38,7 @@ import org.opensearch.client.ResponseException; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.http.HttpTransportSettings; import 
org.opensearch.test.rest.OpenSearchRestTestCase; import org.opensearch.test.rest.yaml.ObjectPath; diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java index 124bc02527bd1..998c89590c53c 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java @@ -78,8 +78,8 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.xcontent.NamedXContentRegistry; diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java index 9a5459a5ab572..a30cf00ce4047 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/CopyBytesSocketChannel.java @@ -52,7 +52,7 @@ import io.netty.channel.RecvByteBufAllocator; import io.netty.channel.socket.nio.NioSocketChannel; import org.opensearch.common.SuppressForbidden; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java index d7f2f6eb6acbb..ef60797bca067 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4ModulePlugin.java @@ -45,7 +45,7 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.http.HttpServerTransport; import org.opensearch.http.netty4.Netty4HttpServerTransport; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.plugins.NetworkPlugin; import org.opensearch.plugins.Plugin; import org.opensearch.threadpool.ThreadPool; diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java index f2f6538d305d9..d32a43e3eb9eb 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java @@ -42,7 +42,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.common.Booleans; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.monitor.jvm.JvmInfo; import java.util.concurrent.atomic.AtomicBoolean; diff --git 
a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java index 561cac2facbff..637cb10c383bd 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/netty4/Netty4Transport.java @@ -57,13 +57,13 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.net.NetUtils; import org.opensearch.common.lease.Releasables; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.Netty4NioSocketChannel; import org.opensearch.transport.NettyAllocator; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java index ef014aa39367b..5fcfc4ee5e151 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java @@ -44,7 +44,7 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.http.HttpServerTransport; import org.opensearch.http.HttpTransportSettings; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java index cad2e50327023..9f359002e441a 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpClient.java @@ -72,8 +72,8 @@ import io.netty.util.AttributeKey; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.tasks.Task; import org.opensearch.transport.NettyAllocator; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java index adf4d59a0c139..6c8cf69afb148 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -52,7 +52,7 @@ import org.opensearch.http.HttpResponse; import 
org.opensearch.http.HttpServerTransport; import org.opensearch.http.NullDispatcher; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.core.rest.RestStatus; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java index 5a43057b1b7d1..d23edfda829f9 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java @@ -65,7 +65,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; @@ -75,7 +75,7 @@ import org.opensearch.http.HttpServerTransport; import org.opensearch.http.HttpTransportSettings; import org.opensearch.http.NullDispatcher; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index f80d7f41b5f55..5c1c5970a7cfb 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -39,7 +39,7 @@ import org.opensearch.common.transport.TransportAddress; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.SharedGroupFactory; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java index d3fa8ea56ffe7..5ee194b7bb513 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4UtilsTests.java @@ -36,14 +36,14 @@ import io.netty.buffer.CompositeByteBuf; import io.netty.buffer.Unpooled; import org.apache.lucene.util.BytesRef; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.bytes.AbstractBytesReferenceTestCase; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import 
org.opensearch.common.io.stream.ReleasableBytesStreamOutput; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java index 5d7841df4bf33..c10a92ba1900b 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/NettyTransportMultiPortTests.java @@ -32,14 +32,14 @@ package org.opensearch.transport.netty4; import org.opensearch.Version; -import org.opensearch.common.component.Lifecycle; +import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkService; import org.opensearch.common.network.NetworkUtils; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java index 619f473b8bef2..ee2bf12a4246f 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java @@ -43,7 +43,7 @@ import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.util.net.NetUtils; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.transport.MockTransportService; import org.opensearch.test.transport.StubbableTransport; import org.opensearch.transport.AbstractSimpleTransportTestCase; diff --git a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java index 9dbf08a3e1a01..23db0a74dc3fc 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java @@ -51,7 +51,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.SpecialPermission; import org.opensearch.cloud.azure.classic.AzureServiceRemoteException; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.Setting; import 
org.opensearch.common.settings.Settings; import org.opensearch.core.common.Strings; diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java index 861926a9e67c9..afa35f63ae4dc 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java @@ -46,7 +46,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.transport.MockTransportService; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportService; diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java index cc65e92b1485a..ea10d03576d94 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java @@ -45,7 +45,7 @@ import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.discovery.SeedHostsProvider; import org.opensearch.discovery.SeedHostsResolver; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.transport.MockTransportService; import org.opensearch.transport.TransportService; import org.opensearch.transport.nio.MockNioTransport; diff --git a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java index 4873cb6dcbf7a..b7ae7f8b404be 100644 --- a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java +++ b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/GceMetadataService.java @@ -46,7 +46,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.cloud.gce.util.Access; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; diff --git a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 67e79addfedc5..48285f80150be 100644 --- a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -46,7 +46,7 @@ import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.plugins.Plugin; import 
org.opensearch.repositories.blobstore.OpenSearchMockAPIBasedRepositoryIntegTestCase; import org.opensearch.core.rest.RestStatus; diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java index 2677604ecb622..3846dd14559b5 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureRepository.java @@ -41,7 +41,7 @@ import org.opensearch.common.blobstore.BlobStore; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.indices.recovery.RecoverySettings; diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java index c518cc2716db6..88e9a63384f7a 100644 --- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java @@ -61,8 +61,8 @@ import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsException; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import java.net.Authenticator; diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java index 8b68ccebf8c53..ab16edc7b1a2d 100644 --- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -53,7 +53,7 @@ import org.opensearch.common.network.InetAddresses; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.CountDown; import org.opensearch.core.rest.RestStatus; diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java index 24a226290985a..bfae0a3c4438c 100644 --- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureRepositorySettingsTests.java @@ -38,8 +38,8 @@ import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import 
org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.indices.recovery.RecoverySettings; diff --git a/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index f0f0fb7681c1a..92a2f99f4a441 100644 --- a/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -55,8 +55,8 @@ import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.indices.recovery.RecoverySettings; diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 226aec437fc0f..f5c20003ea7b6 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -55,8 +55,8 @@ import org.opensearch.common.blobstore.support.PlainBlobMetadata; import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.Streams; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import java.io.ByteArrayInputStream; import java.io.IOException; diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java index a743ac72bdb8b..05d9739c00d50 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -38,8 +38,8 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.indices.recovery.RecoverySettings; diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java 
b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java index 488376d36cdc4..466344668d966 100644 --- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java +++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -53,7 +53,7 @@ import org.opensearch.common.network.InetAddresses; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.CountDown; import org.opensearch.core.common.Strings; diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java index 88c58942e9bbf..10c3bc2d0364b 100644 --- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java @@ -48,7 +48,7 @@ import org.opensearch.common.SuppressForbidden; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; diff --git a/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java index 3070c654a96ee..805af9874b552 100644 --- a/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -45,7 +45,7 @@ import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.plugins.Plugin; diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java index 81a902a6992d8..1a644934245cb 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java @@ -51,8 +51,8 @@ import org.opensearch.common.blobstore.support.AbstractBlobContainer; import org.opensearch.common.blobstore.support.PlainBlobMetadata; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import software.amazon.awssdk.core.exception.SdkException; import 
software.amazon.awssdk.core.sync.RequestBody; import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest; diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java index 30040e182cbc9..ed1ebf1f531f8 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobStore.java @@ -39,7 +39,7 @@ import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.blobstore.BlobStore; import org.opensearch.common.blobstore.BlobStoreException; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import software.amazon.awssdk.services.s3.model.ObjectCannedACL; import software.amazon.awssdk.services.s3.model.StorageClass; import org.opensearch.repositories.s3.async.AsyncExecutorContainer; diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java index d42bfc0be7e4f..f98b775d9ce4b 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Repository.java @@ -46,8 +46,8 @@ import org.opensearch.common.settings.SecureSetting; import org.opensearch.core.common.settings.SecureString; import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.indices.recovery.RecoverySettings; diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java index 5b43ae84c51dc..cb6851652f208 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/async/AsyncTransferManager.java @@ -17,7 +17,7 @@ import org.opensearch.common.blobstore.exception.CorruptFileException; import org.opensearch.common.blobstore.stream.write.WritePriority; import org.opensearch.common.io.InputStreamContainer; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.util.ByteUtils; import org.opensearch.repositories.s3.io.CheckedContainer; import org.opensearch.repositories.s3.SocketAccess; diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java index 10137f0475177..8bb446fff0b61 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerMockClientTests.java @@ -24,7 +24,7 @@ import org.opensearch.common.io.InputStreamContainer; import org.opensearch.common.lucene.store.ByteArrayIndexInput; import org.opensearch.common.settings.Settings; 
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.util.io.IOUtils;
 import org.opensearch.repositories.s3.async.AsyncExecutorContainer;
 import org.opensearch.repositories.s3.async.AsyncTransferManager;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java
index 1a1fb123aa5ea..016be07e5d533 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobContainerRetriesTests.java
@@ -57,8 +57,8 @@
 import org.opensearch.common.network.InetAddresses;
 import org.opensearch.common.settings.MockSecureSettings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.CountDown;
 import org.opensearch.common.util.io.IOUtils;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java
index a2a7ca8d8bdd5..f88b3616d2f0a 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java
@@ -40,7 +40,7 @@
 import org.opensearch.common.blobstore.BlobStoreException;
 import org.opensearch.common.blobstore.DeleteResult;
 import org.opensearch.common.collect.Tuple;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.test.OpenSearchTestCase;
 import software.amazon.awssdk.core.exception.SdkException;
 import software.amazon.awssdk.core.sync.RequestBody;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java
index 84d56c7ae2854..e5fd9e5caab9c 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RepositoryTests.java
@@ -36,8 +36,8 @@
 import org.opensearch.cluster.metadata.RepositoryMetadata;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.indices.recovery.RecoverySettings;
 import org.opensearch.repositories.RepositoryException;
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java
index 596291a1d94fb..2fc10c65eaa65 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/async/AsyncTransferManagerTests.java
@@ -14,7 +14,7 @@
 import org.opensearch.common.blobstore.exception.CorruptFileException;
 import org.opensearch.common.blobstore.stream.write.WritePriority;
 import org.opensearch.common.io.InputStreamContainer;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.repositories.blobstore.ZeroInputStream;
 import org.opensearch.test.OpenSearchTestCase;
 import software.amazon.awssdk.awscore.exception.AwsErrorDetails;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java
index 1befc110eb6a5..9dd13144bc454 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/http/nio/NioHttpServerTransport.java
@@ -41,7 +41,7 @@
 import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.util.BigArrays;
 import org.opensearch.common.util.PageCacheRecycler;
 import org.opensearch.core.xcontent.NamedXContentRegistry;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java
index 1509e0b179bfe..0990f97124e53 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransport.java
@@ -44,7 +44,7 @@
 import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.PageCacheRecycler;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
 import org.opensearch.nio.BytesChannelContext;
 import org.opensearch.nio.ChannelFactory;
 import org.opensearch.nio.Config;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java
index 67598aec154fa..bd8aec534f608 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/NioTransportPlugin.java
@@ -47,7 +47,7 @@
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.http.HttpServerTransport;
 import org.opensearch.http.nio.NioHttpServerTransport;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
 import org.opensearch.plugins.NetworkPlugin;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.threadpool.ThreadPool;
diff --git a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java
index 29ef19a2aec87..ee96e9d85c03b 100644
--- a/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java
+++ b/plugins/transport-nio/src/main/java/org/opensearch/transport/nio/TcpReadWriteHandler.java
@@ -32,7 +32,7 @@
 package org.opensearch.transport.nio;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.common.bytes.CompositeBytesReference;
 import org.opensearch.common.bytes.ReleasableBytesReference;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java
index c606a4818a324..ca62c0dedd452 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/HttpReadWriteHandlerTests.java
@@ -49,7 +49,7 @@
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.http.CorsHandler;
 import org.opensearch.http.HttpChannel;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java
index edaee15507df9..2922f28e3be18 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpClient.java
@@ -51,8 +51,8 @@
 import org.opensearch.action.support.PlainActionFuture;
 import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.util.io.IOUtils;
 import org.opensearch.nio.BytesChannelContext;
 import org.opensearch.nio.ChannelFactory;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java
index c69fe23002dfe..4d0db18d433ec 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java
@@ -54,7 +54,7 @@
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.transport.TransportAddress;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.MockBigArrays;
 import org.opensearch.common.util.MockPageCacheRecycler;
@@ -64,7 +64,7 @@
 import org.opensearch.http.HttpServerTransport;
 import org.opensearch.http.HttpTransportSettings;
 import org.opensearch.http.NullDispatcher;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
 import org.opensearch.nio.NioSocketChannel;
 import org.opensearch.rest.BytesRestResponse;
 import org.opensearch.rest.RestChannel;
diff --git a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java
index 4b06c4e15bce7..d0e779edded7d 100644
--- a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java
+++ b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java
@@ -43,7 +43,7 @@
 import org.opensearch.common.util.MockPageCacheRecycler;
 import org.opensearch.common.util.io.IOUtils;
 import org.opensearch.common.util.net.NetUtils;
-import org.opensearch.indices.breaker.NoneCircuitBreakerService;
+import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
 import org.opensearch.test.transport.MockTransportService;
 import org.opensearch.test.transport.StubbableTransport;
 import org.opensearch.transport.AbstractSimpleTransportTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
index 51598d7775623..a75448dadf427 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java
@@ -65,7 +65,7 @@
 import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider;
 import org.opensearch.common.Priority;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.core.index.Index;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java
index 7f175289f3a88..d6a7dcf4b6152 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/rollover/RolloverIT.java
@@ -47,8 +47,8 @@
 import org.opensearch.cluster.routing.allocation.AllocationService;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.test.OpenSearchIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
index 850034bc631b1..c77178a5165bc 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkProcessorIT.java
@@ -41,8 +41,8 @@
 import org.opensearch.client.Requests;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.test.OpenSearchIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java
index afa5ac908c137..24a66083d85ff 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java
@@ -47,7 +47,7 @@
 import org.opensearch.action.support.WriteRequest;
 import org.opensearch.client.Client;
 import org.opensearch.cluster.metadata.IndexMetadata;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.settings.Settings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java
index 61171b0a817b0..4683535a3a095 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java
index cafce1a194caa..2d2714723802f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java
@@ -59,8 +59,8 @@
 import org.opensearch.common.io.PathUtils;
 import org.opensearch.common.io.PathUtilsForTesting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.env.Environment;
 import org.opensearch.env.NodeEnvironment;
 import org.opensearch.index.IndexSettings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java
index 79b674b23fd48..f1c34aa4f4141 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/settings/ClusterSettingsIT.java
@@ -42,7 +42,7 @@
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.SettingsException;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.indices.recovery.RecoverySettings;
 import org.opensearch.test.OpenSearchIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java
index 23335f6e82ef1..9433bebc24c20 100644
--- a/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/cluster/shards/ClusterShardLimitIT.java
@@ -46,7 +46,7 @@
 import org.opensearch.common.Priority;
 import org.opensearch.common.network.NetworkModule;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.common.util.io.IOUtils;
 import org.opensearch.indices.ShardLimitValidator;
 import org.opensearch.snapshots.SnapshotInfo;
diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
index 9a465c2f9121c..e48e13b471ba5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/gateway/ReplicaShardAllocatorIT.java
@@ -38,8 +38,8 @@
 import org.opensearch.cluster.routing.UnassignedInfo;
 import org.opensearch.cluster.service.ClusterService;
 import org.opensearch.common.Priority;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.core.index.Index;
 import org.opensearch.index.IndexService;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
index ba9f335cd24d4..4b95c2fc4bce5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
@@ -58,8 +58,8 @@
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.common.lucene.uid.Versions;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.common.util.io.IOUtils;
@@ -84,7 +84,7 @@
 import org.opensearch.index.translog.Translog;
 import org.opensearch.index.translog.TranslogStats;
 import org.opensearch.indices.IndicesService;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
 import org.opensearch.indices.recovery.RecoveryState;
 import org.opensearch.indices.replication.checkpoint.SegmentReplicationCheckpointPublisher;
 import org.opensearch.plugins.Plugin;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java
index b6124ff09d992..df2c8c62ca392 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java
@@ -65,8 +65,8 @@
 import org.opensearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand;
 import org.opensearch.common.io.PathUtils;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.index.shard.ShardId;
 import org.opensearch.env.Environment;
 import org.opensearch.env.NodeEnvironment;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
index d51e4bbff11b5..89031f68aba97 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java
@@ -66,8 +66,8 @@
 import org.opensearch.common.io.stream.BytesStreamOutput;
 import org.opensearch.common.lucene.Lucene;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.env.NodeEnvironment;
 import org.opensearch.core.index.Index;
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java
index 1dd0f6a3d664e..6cc1c51ed65d9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedTranslogIT.java
@@ -38,8 +38,8 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.cluster.routing.UnassignedInfo;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.index.IndexSettings;
 import org.opensearch.index.MockEngineFactoryPlugin;
 import org.opensearch.index.translog.TestTranslog;
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
index 2ab44f8318617..10bd179ddc5fd 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
@@ -46,17 +46,17 @@
 import org.opensearch.client.Requests;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
-import org.opensearch.common.breaker.NoopCircuitBreaker;
+import org.opensearch.core.common.breaker.NoopCircuitBreaker;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.indices.breaker.CircuitBreakerStats;
-import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
+import org.opensearch.core.indices.breaker.CircuitBreakerStats;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.rest.RestStatus;
+import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.OpenSearchIntegTestCase;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
@@ -197,7 +197,7 @@ public void testRamAccountingTermsEnum() throws Exception {
         prepareCreate("ramtest").setSource(
             "{\"mappings\": {\"type\": {\"properties\": {\"test\": " + "{\"type\": \"text\",\"fielddata\": true,\"fielddata_frequency_filter\": {\"max\": 10000}}}}}}",
-            XContentType.JSON
+            MediaTypeRegistry.JSON
         )
     );
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
index 341c0a965f94e..5ce9de7d0eae0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
@@ -44,7 +44,7 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchRequestBuilder;
 import org.opensearch.common.Strings;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Setting.Property;
 import org.opensearch.common.settings.Settings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
index c31b5e1f3bc5b..850f08b8136c1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java
@@ -74,12 +74,12 @@
 import org.opensearch.common.Priority;
 import org.opensearch.common.SetOnce;
 import org.opensearch.common.Strings;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
 import org.opensearch.common.concurrent.GatedCloseable;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
index 28bd5a6ae252d..e95f10bd7abdb 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java
@@ -46,8 +46,8 @@
 import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.cluster.routing.ShardRouting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.util.set.Sets;
 import org.opensearch.index.IndexNotFoundException;
 import org.opensearch.index.IndexSettings;
diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
index b5d7bd476059d..2454f6553951e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java
@@ -39,8 +39,8 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.indices.recovery.PeerRecoveryTargetService;
 import org.opensearch.indices.recovery.FileChunkRequest;
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java
index 3fe7f3d553a1b..608d7d9d02581 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java
@@ -12,7 +12,7 @@
 import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsResponse;
 import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.core.common.bytes.BytesReference;
diff --git a/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java
index 4e7f2ae486c93..d5c744a49c62d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/repositories/fs/FsBlobStoreRepositoryIT.java
@@ -37,8 +37,8 @@
 import org.opensearch.common.blobstore.fs.FsBlobStore;
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.util.io.IOUtils;
 import org.opensearch.repositories.blobstore.OpenSearchBlobStoreRepositoryIntegTestCase;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
index be69428453952..a54ca880c3dd0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
@@ -35,7 +35,7 @@
 import org.opensearch.OpenSearchException;
 import org.opensearch.ExceptionsHelper;
 import org.opensearch.action.index.IndexRequestBuilder;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
 import org.opensearch.search.aggregations.Aggregator;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java
index 483d698f3c9a4..861279c4000d8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/CorruptedBlobStoreRepositoryIT.java
@@ -41,7 +41,7 @@
 import org.opensearch.cluster.metadata.Metadata;
 import org.opensearch.cluster.metadata.RepositoriesMetadata;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.common.util.io.IOUtils;
 import org.opensearch.repositories.IndexId;
 import org.opensearch.repositories.RepositoriesService;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java
index f6df7cccf96f7..bbca3bdc417c7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RepositoriesIT.java
@@ -44,7 +44,7 @@
 import org.opensearch.cluster.metadata.RepositoryMetadata;
 import org.opensearch.core.util.FileSystemUtils;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.repositories.RepositoriesService;
 import org.opensearch.repositories.RepositoryException;
 import org.opensearch.repositories.RepositoryVerificationException;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java
index 30a836b41e29e..8677e61efeb46 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java
@@ -52,7 +52,7 @@
 import org.opensearch.cluster.metadata.MappingMetadata;
 import org.opensearch.common.io.PathUtils;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java
index 300e1db09b4c5..efd072fce511d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java
@@ -30,7 +30,7 @@
 import org.opensearch.cluster.routing.ShardRouting;
 import org.opensearch.common.io.PathUtils;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.core.index.Index;
 import org.opensearch.index.IndexModule;
 import org.opensearch.index.IndexNotFoundException;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 4bba25039d376..420df6b4c34c8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -66,7 +66,7 @@
 import org.opensearch.cluster.routing.UnassignedInfo;
 import org.opensearch.cluster.service.ClusterService;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.core.util.BytesRefUtils;
 import org.opensearch.core.index.Index;
diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java
index c22dd90cc930b..e47a2b94fc715 100644
--- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java
@@ -46,7 +46,7 @@
 import org.opensearch.client.Client;
 import org.opensearch.cluster.SnapshotsInProgress;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.util.io.IOUtils;
diff --git a/server/src/main/java/org/opensearch/OpenSearchServerException.java b/server/src/main/java/org/opensearch/OpenSearchServerException.java
index d53164427debf..b8c6d1e78d25a 100644
--- a/server/src/main/java/org/opensearch/OpenSearchServerException.java
+++ b/server/src/main/java/org/opensearch/OpenSearchServerException.java
@@ -288,14 +288,6 @@ public static void registerExceptions() {
                 UNKNOWN_VERSION_ADDED
             )
         );
-        registerExceptionHandle(
-            new OpenSearchExceptionHandle(
-                org.opensearch.OpenSearchParseException.class,
-                org.opensearch.OpenSearchParseException::new,
-                35,
-                UNKNOWN_VERSION_ADDED
-            )
-        );
         registerExceptionHandle(
             new OpenSearchExceptionHandle(
                 org.opensearch.search.SearchException.class,
@@ -915,14 +907,6 @@ public static void registerExceptions() {
                 UNKNOWN_VERSION_ADDED
             )
        );
-        registerExceptionHandle(
-            new OpenSearchExceptionHandle(
-                org.opensearch.common.breaker.CircuitBreakingException.class,
-                org.opensearch.common.breaker.CircuitBreakingException::new,
-                133,
-                UNKNOWN_VERSION_ADDED
-            )
-        );
         registerExceptionHandle(
             new OpenSearchExceptionHandle(
                 org.opensearch.transport.NodeNotConnectedException.class,
diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java
index 2ce1d4bcd4b02..b775095861150 100644
--- a/server/src/main/java/org/opensearch/action/ActionModule.java
+++ b/server/src/main/java/org/opensearch/action/ActionModule.java
@@ -301,7 +301,7 @@
 import org.opensearch.index.seqno.RetentionLeaseActions;
 import org.opensearch.identity.IdentityService;
 import org.opensearch.indices.SystemIndices;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
 import org.opensearch.persistent.CompletionPersistentTaskAction;
 import org.opensearch.persistent.RemovePersistentTaskAction;
 import org.opensearch.persistent.StartPersistentTaskAction;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java
index acf40e3a9de3c..1d2dbf66920f8 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java
@@ -40,7 +40,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.http.HttpInfo;
 import org.opensearch.ingest.IngestInfo;
 import org.opensearch.monitor.jvm.JvmInfo;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java
index 3b6c85ee6e091..82e03d1fd79ac 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodeStats.java
@@ -49,7 +49,7 @@
 import org.opensearch.index.stats.ShardIndexingPressureStats;
 import org.opensearch.index.store.remote.filecache.FileCacheStats;
 import org.opensearch.indices.NodeIndicesStats;
-import org.opensearch.indices.breaker.AllCircuitBreakerStats;
+import org.opensearch.core.indices.breaker.AllCircuitBreakerStats;
 import org.opensearch.ingest.IngestStats;
 import org.opensearch.monitor.fs.FsInfo;
 import org.opensearch.monitor.jvm.JvmStats;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java
index 282585a43183a..8b718aeaf70c7 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java
@@ -36,7 +36,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.core.xcontent.ToXContentObject;
diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java
index 699884ca0eab3..bddb3fb746eb1 100644
--- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java
+++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java
@@ -42,7 +42,7 @@
 import org.opensearch.common.network.NetworkModule;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.transport.TransportAddress;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.xcontent.ToXContentFragment;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java
index a716959614065..27f20f028ea74 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java
@@ -39,6 +39,7 @@
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.xcontent.LoggingDeprecationHandler;
+import org.opensearch.core.xcontent.MediaType;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentType;
@@ -200,8 +201,8 @@ public CreateIndexRequestBuilder addAlias(Alias alias) {
     /**
      * Sets the settings and mappings as a single source.
      */
-    public CreateIndexRequestBuilder setSource(String source, XContentType xContentType) {
-        request.source(source, xContentType);
+    public CreateIndexRequestBuilder setSource(String source, MediaType mediaType) {
+        request.source(source, mediaType);
         return this;
     }
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java
index 0b2375850f1fc..2b536d24c946f 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsAction.java
@@ -55,7 +55,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.ToXContentObject;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.IndexService;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java
index 280dc307447b7..e014d6d703500 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/Condition.java
@@ -34,7 +34,7 @@
 import org.opensearch.Version;
 import org.opensearch.core.common.io.stream.NamedWriteable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.ToXContentFragment;
 import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java
index f0ab571ea9f75..faa3558420a5c 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MaxSizeCondition.java
@@ -34,8 +34,8 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.core.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java
index 95a4b6573611d..11a7555d15d3f 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java
@@ -40,7 +40,7 @@
 import org.opensearch.core.ParseField;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.core.xcontent.ObjectParser;
 import org.opensearch.core.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java
index ed598c14acec3..1603f95f4d512 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java
@@ -36,7 +36,7 @@
 import org.opensearch.action.support.clustermanager.ClusterManagerNodeOperationRequestBuilder;
 import org.opensearch.client.OpenSearchClient;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 /**
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java
index 4ddff1563885a..ca2921ceb70c8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/TransportRolloverAction.java
@@ -55,7 +55,7 @@
 import org.opensearch.common.Nullable;
 import org.opensearch.common.inject.Inject;
 import org.opensearch.core.common.io.stream.StreamInput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.index.shard.DocsStats;
 import org.opensearch.tasks.Task;
 import org.opensearch.threadpool.ThreadPool;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java
index b2e7ed92e608a..aef2dc8f2c7c8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequest.java
@@ -42,7 +42,7 @@
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.ParseField;
 import org.opensearch.core.xcontent.ObjectParser;
 import org.opensearch.core.xcontent.ToXContentObject;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java
index eb05c0a69b78b..855e678c77b9b 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/ResizeRequestBuilder.java
@@ -37,7 +37,7 @@
 import org.opensearch.action.support.master.AcknowledgedRequestBuilder;
 import org.opensearch.client.OpenSearchClient;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 /**
  * Transport request builder for resizing an index
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java
index 0d31c90a98f56..328768bc9ae0e 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java
@@ -57,7 +57,7 @@
 import org.opensearch.core.index.shard.ShardId;
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.TransportService;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.index.store.StoreStats;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java
index 5a3a34e9a2ebe..e4abaef4ddfa8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/CommonStats.java
@@ -37,7 +37,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.core.xcontent.ToXContentFragment;
 import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
index 7fe663a347ee3..28215475416ba 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/upgrade/get/UpgradeStatusResponse.java
@@ -36,7 +36,7 @@
 import org.opensearch.action.support.broadcast.BroadcastResponse;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.XContentBuilder;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java
index 4695b44c4986b..a01fc82fffd01 100644
--- a/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java
+++ b/server/src/main/java/org/opensearch/action/bulk/BulkProcessor.java
@@ -41,8 +41,8 @@
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.common.collect.Tuple;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeUnit;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeUnit;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.threadpool.Scheduler;
diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequest.java b/server/src/main/java/org/opensearch/action/index/IndexRequest.java
index 96d27917d5164..ac4c8436aab5a 100644
--- a/server/src/main/java/org/opensearch/action/index/IndexRequest.java
+++ b/server/src/main/java/org/opensearch/action/index/IndexRequest.java
@@ -53,7 +53,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.lucene.uid.Versions;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.xcontent.XContentHelper;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.core.common.Strings;
diff --git a/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java b/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java
index 45c2dc4f29403..43c95133f12d6 100644
--- a/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java
+++ b/server/src/main/java/org/opensearch/action/search/QueryPhaseResultConsumer.java
@@ -35,8 +35,8 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.search.TopDocs;
-import org.opensearch.common.breaker.CircuitBreaker;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
index 512d3295c4cfc..2f002d21d9b68 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
@@ -45,7 +45,7 @@
 import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.search.TotalHits.Relation;
 import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.common.lucene.search.TopDocsAndMaxScore;
 import org.opensearch.search.DocValueFormat;
diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java
index c4af9ffa20194..1011f17c98dd6 100644
--- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java
+++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java
@@ -57,7 +57,7 @@
 import org.opensearch.cluster.routing.ShardIterator;
 import org.opensearch.cluster.service.ClusterService;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.breaker.CircuitBreaker;
+import org.opensearch.core.common.breaker.CircuitBreaker;
 import org.opensearch.common.inject.Inject;
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.core.common.io.stream.Writeable;
@@ -70,7 +70,7 @@
 import org.opensearch.core.index.Index;
 import org.opensearch.index.query.Rewriteable;
 import org.opensearch.core.index.shard.ShardId;
-import org.opensearch.indices.breaker.CircuitBreakerService;
+import org.opensearch.core.indices.breaker.CircuitBreakerService;
 import org.opensearch.search.SearchPhaseResult;
 import org.opensearch.search.SearchService;
 import org.opensearch.search.SearchShardTarget;
diff --git a/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java
index 1affc9202c32b..d3a617853b019 100644
--- a/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java
+++ b/server/src/main/java/org/opensearch/action/support/replication/ReplicationOperation.java
@@ -47,7 +47,7 @@
 import org.opensearch.cluster.routing.IndexShardRoutingTable;
 import org.opensearch.cluster.routing.ShardRouting;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.breaker.CircuitBreakingException;
+import org.opensearch.core.common.breaker.CircuitBreakingException;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
diff --git a/server/src/main/java/org/opensearch/cluster/ClusterInfo.java b/server/src/main/java/org/opensearch/cluster/ClusterInfo.java
index 3793b5094a4cb..7b1a2f0f12b69 100644
--- a/server/src/main/java/org/opensearch/cluster/ClusterInfo.java
+++ b/server/src/main/java/org/opensearch/cluster/ClusterInfo.java
@@ -34,7 +34,7 @@
 import org.opensearch.Version;
 import org.opensearch.cluster.routing.ShardRouting;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/DiskUsage.java b/server/src/main/java/org/opensearch/cluster/DiskUsage.java
index 961bfce053243..c472522baee51 100644
--- a/server/src/main/java/org/opensearch/cluster/DiskUsage.java
+++ b/server/src/main/java/org/opensearch/cluster/DiskUsage.java
@@ -32,11 +32,11 @@
 package org.opensearch.cluster;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.ToXContentFragment;
 import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java b/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java
index 0014d5c61fb2d..b5a9e4e7b30a0 100644
--- a/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java
+++ b/server/src/main/java/org/opensearch/cluster/NodeConnectionsService.java
@@ -44,7 +44,7 @@
 import org.opensearch.cluster.node.DiscoveryNodes;
 import org.opensearch.cluster.service.ClusterApplier;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
 import org.opensearch.common.inject.Inject;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
index de751d881bc0e..1c38e68c43466 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
@@ -62,7 +62,7 @@
 import org.opensearch.common.Priority;
 import org.opensearch.common.SetOnce;
 import org.opensearch.common.Strings;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Setting;
diff --git a/server/src/main/java/org/opensearch/cluster/routing/DelayedAllocationService.java b/server/src/main/java/org/opensearch/cluster/routing/DelayedAllocationService.java
index 844b78dccc59b..74a3342addc50 100644
--- a/server/src/main/java/org/opensearch/cluster/routing/DelayedAllocationService.java
+++ b/server/src/main/java/org/opensearch/cluster/routing/DelayedAllocationService.java
@@ -42,7 +42,7 @@
 import org.opensearch.cluster.routing.allocation.AllocationService;
 import org.opensearch.cluster.routing.allocation.RoutingAllocation;
 import org.opensearch.cluster.service.ClusterService;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
 import org.opensearch.common.inject.Inject;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettings.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettings.java
index d54236ada6780..542aa9ca5becf 100644
--- a/server/src/main/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettings.java
+++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettings.java
@@ -34,13 +34,13 @@
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.Version;
-import org.opensearch.common.Strings;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.RatioValue;
 import org.opensearch.common.unit.TimeValue;
+import org.opensearch.core.common.Strings;
 import java.util.Arrays;
 import java.util.Iterator;
@@ -400,13 +400,13 @@ public boolean isCreateIndexBlockAutoReleaseEnabled() {
     String describeLowThreshold() {
         return freeBytesThresholdLow.equals(ByteSizeValue.ZERO)
-            ? Strings.format1Decimals(100.0 - freeDiskThresholdLow, "%")
+            ? org.opensearch.core.common.Strings.format1Decimals(100.0 - freeDiskThresholdLow, "%")
            : freeBytesThresholdLow.toString();
     }
     String describeHighThreshold() {
         return freeBytesThresholdHigh.equals(ByteSizeValue.ZERO)
-            ? Strings.format1Decimals(100.0 - freeDiskThresholdHigh, "%")
+            ? org.opensearch.core.common.Strings.format1Decimals(100.0 - freeDiskThresholdHigh, "%")
            : freeBytesThresholdHigh.toString();
     }
diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/NodeAllocationResult.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/NodeAllocationResult.java
index 946e2d5e5f3dc..ad568fc5ffa57 100644
--- a/server/src/main/java/org/opensearch/cluster/routing/allocation/NodeAllocationResult.java
+++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/NodeAllocationResult.java
@@ -39,7 +39,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.ToXContentFragment;
 import org.opensearch.core.xcontent.ToXContentObject;
 import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
index 61b96184abcc4..3325b3475fd73 100644
--- a/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
+++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDecider.java
@@ -47,11 +47,11 @@
 import org.opensearch.cluster.routing.ShardRoutingState;
 import org.opensearch.cluster.routing.allocation.DiskThresholdSettings;
 import org.opensearch.cluster.routing.allocation.RoutingAllocation;
-import org.opensearch.common.Strings;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.index.Index;
 import org.opensearch.core.index.shard.ShardId;
 import org.opensearch.index.store.remote.filecache.FileCacheStats;
diff --git a/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java
index 7f1c9f01f7e6f..c03f5ae619edf 100644
--- a/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java
+++ b/server/src/main/java/org/opensearch/cluster/service/ClusterApplierService.java
@@ -51,7 +51,7 @@
 import org.opensearch.common.Priority;
 import org.opensearch.common.StopWatch;
 import org.opensearch.common.StopWatch.TimingHandle;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/cluster/service/ClusterService.java b/server/src/main/java/org/opensearch/cluster/service/ClusterService.java
index a605c41bdeff8..e097803d86b48 100644
--- a/server/src/main/java/org/opensearch/cluster/service/ClusterService.java
+++ b/server/src/main/java/org/opensearch/cluster/service/ClusterService.java
@@ -45,7 +45,7 @@
 import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.cluster.routing.OperationRouting;
 import org.opensearch.cluster.routing.RerouteService;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Setting.Property;
diff --git a/server/src/main/java/org/opensearch/cluster/service/MasterService.java b/server/src/main/java/org/opensearch/cluster/service/MasterService.java
index 790efaef95292..9f76460d2acdd 100644
--- a/server/src/main/java/org/opensearch/cluster/service/MasterService.java
+++ b/server/src/main/java/org/opensearch/cluster/service/MasterService.java
@@ -55,7 +55,7 @@
 import org.opensearch.cluster.routing.RoutingTable;
 import org.opensearch.common.Nullable;
 import org.opensearch.common.Priority;
-import org.opensearch.common.component.AbstractLifecycleComponent;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/common/FieldMemoryStats.java b/server/src/main/java/org/opensearch/common/FieldMemoryStats.java
index 1f8a6aba0c883..86a2fe1397cec 100644
--- a/server/src/main/java/org/opensearch/common/FieldMemoryStats.java
+++ b/server/src/main/java/org/opensearch/common/FieldMemoryStats.java
@@ -35,7 +35,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.unit.ByteSizeValue;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.XContentBuilder;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/common/Strings.java b/server/src/main/java/org/opensearch/common/Strings.java
index 8e92c86836723..e9f4d32ed2664 100644
--- a/server/src/main/java/org/opensearch/common/Strings.java
+++ b/server/src/main/java/org/opensearch/common/Strings.java
@@ -155,29 +155,6 @@ public static String[] split(String toSplit, String delimiter) {
         return new String[] { beforeDelimiter, afterDelimiter };
     }
-    /**
-     * Format the double value with a single decimal points, trimming trailing '.0'.
- */ - public static String format1Decimals(double value, String suffix) { - String p = String.valueOf(value); - int ix = p.indexOf('.') + 1; - int ex = p.indexOf('E'); - char fraction = p.charAt(ix); - if (fraction == '0') { - if (ex != -1) { - return p.substring(0, ix - 1) + p.substring(ex) + suffix; - } else { - return p.substring(0, ix - 1) + suffix; - } - } else { - if (ex != -1) { - return p.substring(0, ix) + fraction + p.substring(ex) + suffix; - } else { - return p.substring(0, ix) + fraction + suffix; - } - } - } - private Strings() {} public static byte[] toUTF8Bytes(CharSequence charSequence) { diff --git a/server/src/main/java/org/opensearch/common/breaker/ChildMemoryCircuitBreaker.java b/server/src/main/java/org/opensearch/common/breaker/ChildMemoryCircuitBreaker.java index 923f592c6bc79..de4e6ad433c55 100644 --- a/server/src/main/java/org/opensearch/common/breaker/ChildMemoryCircuitBreaker.java +++ b/server/src/main/java/org/opensearch/common/breaker/ChildMemoryCircuitBreaker.java @@ -34,7 +34,9 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.indices.breaker.BreakerSettings; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; diff --git a/server/src/main/java/org/opensearch/common/io/DiskIoBufferPool.java b/server/src/main/java/org/opensearch/common/io/DiskIoBufferPool.java index 80b5dd353703c..e853a6ddc34d2 100644 --- a/server/src/main/java/org/opensearch/common/io/DiskIoBufferPool.java +++ b/server/src/main/java/org/opensearch/common/io/DiskIoBufferPool.java @@ -32,7 +32,7 @@ package org.opensearch.common.io; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.threadpool.ThreadPool; import java.nio.ByteBuffer; diff --git a/server/src/main/java/org/opensearch/common/network/NetworkModule.java b/server/src/main/java/org/opensearch/common/network/NetworkModule.java index d93f8d7c98b32..c052af41d66d9 100644 --- a/server/src/main/java/org/opensearch/common/network/NetworkModule.java +++ b/server/src/main/java/org/opensearch/common/network/NetworkModule.java @@ -53,7 +53,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.http.HttpServerTransport; import org.opensearch.index.shard.PrimaryReplicaSyncer.ResyncTask; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.plugins.NetworkPlugin; import org.opensearch.tasks.RawTaskStatus; import org.opensearch.tasks.Task; diff --git a/server/src/main/java/org/opensearch/common/network/NetworkService.java b/server/src/main/java/org/opensearch/common/network/NetworkService.java index 0fb299ef66e70..1a893283eae21 100644 --- a/server/src/main/java/org/opensearch/common/network/NetworkService.java +++ b/server/src/main/java/org/opensearch/common/network/NetworkService.java @@ -34,7 +34,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import java.io.IOException; diff --git 
a/server/src/main/java/org/opensearch/common/settings/Setting.java b/server/src/main/java/org/opensearch/common/settings/Setting.java index 86d1d4f90ed18..ae390f1cc60ff 100644 --- a/server/src/main/java/org/opensearch/common/settings/Setting.java +++ b/server/src/main/java/org/opensearch/common/settings/Setting.java @@ -40,21 +40,21 @@ import org.opensearch.common.Nullable; import org.opensearch.common.Strings; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.common.regex.Regex; -import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.MemorySizeValue; import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/opensearch/common/settings/Settings.java b/server/src/main/java/org/opensearch/common/settings/Settings.java index 10a619d833add..cd80e9727e0df 100644 --- a/server/src/main/java/org/opensearch/common/settings/Settings.java +++ b/server/src/main/java/org/opensearch/common/settings/Settings.java @@ -43,8 +43,8 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.logging.LogConfigurator; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.MemorySizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; @@ -88,7 +88,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.opensearch.common.unit.ByteSizeValue.parseBytesSizeValue; +import static org.opensearch.core.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.opensearch.common.unit.TimeValue.parseTimeValue; /** diff --git a/server/src/main/java/org/opensearch/common/settings/WriteableSetting.java b/server/src/main/java/org/opensearch/common/settings/WriteableSetting.java index 8664b14119694..e197e88b3012e 100644 --- a/server/src/main/java/org/opensearch/common/settings/WriteableSetting.java +++ b/server/src/main/java/org/opensearch/common/settings/WriteableSetting.java @@ -23,7 +23,7 @@ import org.opensearch.common.settings.Setting.MinTimeValueParser; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Setting.RegexValidator; -import org.opensearch.common.unit.ByteSizeValue; +import 
org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/main/java/org/opensearch/common/unit/MemorySizeValue.java b/server/src/main/java/org/opensearch/common/unit/MemorySizeValue.java index 18aae6277c379..96f128cb6dc38 100644 --- a/server/src/main/java/org/opensearch/common/unit/MemorySizeValue.java +++ b/server/src/main/java/org/opensearch/common/unit/MemorySizeValue.java @@ -33,11 +33,13 @@ package org.opensearch.common.unit; import org.opensearch.OpenSearchParseException; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.monitor.jvm.JvmInfo; import java.util.Objects; -import static org.opensearch.common.unit.ByteSizeValue.parseBytesSizeValue; +import static org.opensearch.core.common.unit.ByteSizeValue.parseBytesSizeValue; /** * Utility methods to get memory sizes. diff --git a/server/src/main/java/org/opensearch/common/unit/SizeValue.java b/server/src/main/java/org/opensearch/common/unit/SizeValue.java index 3b73955f8d046..766199ebbc8f8 100644 --- a/server/src/main/java/org/opensearch/common/unit/SizeValue.java +++ b/server/src/main/java/org/opensearch/common/unit/SizeValue.java @@ -33,7 +33,7 @@ package org.opensearch.common.unit; import org.opensearch.OpenSearchParseException; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/common/util/BigArrays.java b/server/src/main/java/org/opensearch/common/util/BigArrays.java index 45c2092601a7b..3b5a057872bbc 100644 --- a/server/src/main/java/org/opensearch/common/util/BigArrays.java +++ b/server/src/main/java/org/opensearch/common/util/BigArrays.java @@ -36,14 +36,14 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.opensearch.common.Nullable; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; import org.opensearch.common.recycler.Recycler; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.util.BigArray; import org.opensearch.core.common.util.ByteArray; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import java.util.Arrays; diff --git a/server/src/main/java/org/opensearch/common/util/PageCacheRecycler.java b/server/src/main/java/org/opensearch/common/util/PageCacheRecycler.java index f8c690471fb61..65be2a082f084 100644 --- a/server/src/main/java/org/opensearch/common/util/PageCacheRecycler.java +++ b/server/src/main/java/org/opensearch/common/util/PageCacheRecycler.java @@ -39,7 +39,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import java.util.Arrays; diff --git 
a/server/src/main/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnable.java b/server/src/main/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnable.java index b55280d43a473..12fe437b390bf 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnable.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnable.java @@ -32,7 +32,7 @@ package org.opensearch.common.util.concurrent; import org.apache.logging.log4j.Logger; -import org.opensearch.common.component.Lifecycle; +import org.opensearch.common.lifecycle.Lifecycle; import java.util.Objects; diff --git a/server/src/main/java/org/opensearch/common/xcontent/XContentOpenSearchExtension.java b/server/src/main/java/org/opensearch/common/xcontent/XContentOpenSearchExtension.java index 924db8bdea1dd..ab295b12302e1 100644 --- a/server/src/main/java/org/opensearch/common/xcontent/XContentOpenSearchExtension.java +++ b/server/src/main/java/org/opensearch/common/xcontent/XContentOpenSearchExtension.java @@ -35,7 +35,7 @@ import org.apache.lucene.util.BytesRef; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.time.DateFormatter; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentBuilderExtension; diff --git a/server/src/main/java/org/opensearch/discovery/Discovery.java b/server/src/main/java/org/opensearch/discovery/Discovery.java index 25b3cb6c2b90d..9d6807b6522c9 100644 --- a/server/src/main/java/org/opensearch/discovery/Discovery.java +++ b/server/src/main/java/org/opensearch/discovery/Discovery.java @@ -33,7 +33,7 @@ package org.opensearch.discovery; import org.opensearch.cluster.coordination.ClusterStatePublisher; -import org.opensearch.common.component.LifecycleComponent; +import org.opensearch.common.lifecycle.LifecycleComponent; /** * A pluggable module allowing to implement discovery of other nodes, publishing of the cluster diff --git a/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java b/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java index 14805648c6771..cef7853011b82 100644 --- a/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java +++ b/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.common.SetOnce; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; diff --git a/server/src/main/java/org/opensearch/env/NodeEnvironment.java b/server/src/main/java/org/opensearch/env/NodeEnvironment.java index f7d1f6e4343cc..59c5b43a7a904 100644 --- a/server/src/main/java/org/opensearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/opensearch/env/NodeEnvironment.java @@ -57,7 +57,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.common.util.io.IOUtils; import org.opensearch.core.util.FileSystemUtils; diff --git a/server/src/main/java/org/opensearch/gateway/GatewayService.java b/server/src/main/java/org/opensearch/gateway/GatewayService.java index cf105380e98ad..b8fd3d147523b 100644 --- a/server/src/main/java/org/opensearch/gateway/GatewayService.java +++ b/server/src/main/java/org/opensearch/gateway/GatewayService.java @@ -46,7 +46,7 @@ import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.cluster.routing.allocation.AllocationService; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.inject.Inject; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; diff --git a/server/src/main/java/org/opensearch/gateway/ReplicaShardAllocator.java b/server/src/main/java/org/opensearch/gateway/ReplicaShardAllocator.java index 5216dd2fcb4b5..4c90ac1710718 100644 --- a/server/src/main/java/org/opensearch/gateway/ReplicaShardAllocator.java +++ b/server/src/main/java/org/opensearch/gateway/ReplicaShardAllocator.java @@ -48,7 +48,7 @@ import org.opensearch.cluster.routing.allocation.decider.Decision; import org.opensearch.common.Nullable; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.store.StoreFileMetadata; import org.opensearch.indices.store.TransportNodesListShardStoreMetadata; diff --git a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java index 46f296f52ae01..0ba49be01d193 100644 --- a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java @@ -37,7 +37,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.ExceptionsHelper; import org.opensearch.action.ActionListener; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.network.CloseableChannel; import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.network.NetworkService; @@ -47,7 +47,7 @@ import org.opensearch.common.transport.NetworkExceptionHelper; import org.opensearch.common.transport.PortsRange; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.common.Strings; diff --git a/server/src/main/java/org/opensearch/http/HttpInfo.java b/server/src/main/java/org/opensearch/http/HttpInfo.java index a5e981e98e3de..24c29b8dc7444 100644 --- a/server/src/main/java/org/opensearch/http/HttpInfo.java +++ b/server/src/main/java/org/opensearch/http/HttpInfo.java @@ -38,7 +38,7 @@ import org.opensearch.common.network.InetAddresses; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import 
org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.node.ReportingService; diff --git a/server/src/main/java/org/opensearch/http/HttpServerTransport.java b/server/src/main/java/org/opensearch/http/HttpServerTransport.java index 425fa23047764..6549f0786fcda 100644 --- a/server/src/main/java/org/opensearch/http/HttpServerTransport.java +++ b/server/src/main/java/org/opensearch/http/HttpServerTransport.java @@ -32,7 +32,7 @@ package org.opensearch.http; -import org.opensearch.common.component.LifecycleComponent; +import org.opensearch.common.lifecycle.LifecycleComponent; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.node.ReportingService; diff --git a/server/src/main/java/org/opensearch/http/HttpTransportSettings.java b/server/src/main/java/org/opensearch/http/HttpTransportSettings.java index 4522a59d67c05..acff87442d0a8 100644 --- a/server/src/main/java/org/opensearch/http/HttpTransportSettings.java +++ b/server/src/main/java/org/opensearch/http/HttpTransportSettings.java @@ -37,8 +37,8 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.transport.PortsRange; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import java.util.Collections; diff --git a/server/src/main/java/org/opensearch/index/IndexModule.java b/server/src/main/java/org/opensearch/index/IndexModule.java index e4e3a79c8e60c..8a0d563d51107 100644 --- a/server/src/main/java/org/opensearch/index/IndexModule.java +++ b/server/src/main/java/org/opensearch/index/IndexModule.java @@ -76,7 +76,7 @@ import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.index.translog.TranslogFactory; import org.opensearch.indices.IndicesQueryCache; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.mapper.MapperRegistry; import org.opensearch.indices.recovery.RecoveryState; diff --git a/server/src/main/java/org/opensearch/index/IndexService.java b/server/src/main/java/org/opensearch/index/IndexService.java index 1ecc98b7f69f2..aed09710deec8 100644 --- a/server/src/main/java/org/opensearch/index/IndexService.java +++ b/server/src/main/java/org/opensearch/index/IndexService.java @@ -91,7 +91,7 @@ import org.opensearch.index.store.Store; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogFactory; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.mapper.MapperRegistry; diff --git a/server/src/main/java/org/opensearch/index/IndexSettings.java b/server/src/main/java/org/opensearch/index/IndexSettings.java index 0749ad0876534..d0fdbd9ac4e03 100644 --- a/server/src/main/java/org/opensearch/index/IndexSettings.java +++ b/server/src/main/java/org/opensearch/index/IndexSettings.java @@ -41,8 +41,8 @@ import org.opensearch.common.settings.Setting; import 
org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; diff --git a/server/src/main/java/org/opensearch/index/IndexingPressure.java b/server/src/main/java/org/opensearch/index/IndexingPressure.java index 33be340feb335..9f57514a0751a 100644 --- a/server/src/main/java/org/opensearch/index/IndexingPressure.java +++ b/server/src/main/java/org/opensearch/index/IndexingPressure.java @@ -37,7 +37,7 @@ import org.opensearch.common.inject.Inject; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.lease.Releasable; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.index.stats.IndexingPressureStats; diff --git a/server/src/main/java/org/opensearch/index/MergePolicyConfig.java b/server/src/main/java/org/opensearch/index/MergePolicyConfig.java index d0416aaf54a40..fe2af21dfe039 100644 --- a/server/src/main/java/org/opensearch/index/MergePolicyConfig.java +++ b/server/src/main/java/org/opensearch/index/MergePolicyConfig.java @@ -38,8 +38,8 @@ import org.apache.lucene.index.TieredMergePolicy; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; /** * A shard in opensearch is a Lucene index, and a Lucene index is broken diff --git a/server/src/main/java/org/opensearch/index/SegmentReplicationShardStats.java b/server/src/main/java/org/opensearch/index/SegmentReplicationShardStats.java index b0e6e5076d03c..3315a059ee783 100644 --- a/server/src/main/java/org/opensearch/index/SegmentReplicationShardStats.java +++ b/server/src/main/java/org/opensearch/index/SegmentReplicationShardStats.java @@ -12,7 +12,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/cache/query/QueryCacheStats.java b/server/src/main/java/org/opensearch/index/cache/query/QueryCacheStats.java index a4f2628b5e5a3..88077aafb1495 100644 --- a/server/src/main/java/org/opensearch/index/cache/query/QueryCacheStats.java +++ b/server/src/main/java/org/opensearch/index/cache/query/QueryCacheStats.java @@ -36,7 +36,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContent; import 
org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/cache/request/RequestCacheStats.java b/server/src/main/java/org/opensearch/index/cache/request/RequestCacheStats.java index ab8b4706e4ebe..24f68899c2ac7 100644 --- a/server/src/main/java/org/opensearch/index/cache/request/RequestCacheStats.java +++ b/server/src/main/java/org/opensearch/index/cache/request/RequestCacheStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index 326e6aef45b08..74c9c25dc4c1f 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -69,7 +69,7 @@ import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver; import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion; import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.common.lease.Releasable; diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java index 71bd64426161e..7900e63a95c39 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java @@ -43,7 +43,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.MemorySizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; @@ -57,7 +57,7 @@ import org.opensearch.index.translog.TranslogDeletionPolicyFactory; import org.opensearch.index.translog.TranslogFactory; import org.opensearch.indices.IndexingMemoryController; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.threadpool.ThreadPool; import java.util.Comparator; diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java index 744df13265250..c606c00228db9 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java @@ -30,7 +30,7 @@ import org.opensearch.index.translog.TranslogConfig; import org.opensearch.index.translog.TranslogDeletionPolicyFactory; import org.opensearch.index.translog.TranslogFactory; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.plugins.EnginePlugin; 
import org.opensearch.plugins.PluginsService; import org.opensearch.threadpool.ThreadPool; diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 6f8b6d449695e..77d63dfaade54 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -81,7 +81,7 @@ import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver; import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo; import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.KeyedLock; diff --git a/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java b/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java index b55508b7facd3..e852658d7b3ba 100644 --- a/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java @@ -16,7 +16,7 @@ import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.common.util.io.IOUtils; diff --git a/server/src/main/java/org/opensearch/index/engine/OpenSearchConcurrentMergeScheduler.java b/server/src/main/java/org/opensearch/index/engine/OpenSearchConcurrentMergeScheduler.java index ce0f1e85e294d..6736354517ace 100644 --- a/server/src/main/java/org/opensearch/index/engine/OpenSearchConcurrentMergeScheduler.java +++ b/server/src/main/java/org/opensearch/index/engine/OpenSearchConcurrentMergeScheduler.java @@ -41,7 +41,7 @@ import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.metrics.MeanMetric; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.concurrent.OpenSearchExecutors; diff --git a/server/src/main/java/org/opensearch/index/engine/Segment.java b/server/src/main/java/org/opensearch/index/engine/Segment.java index 035d5bb293303..36b84723f0229 100644 --- a/server/src/main/java/org/opensearch/index/engine/Segment.java +++ b/server/src/main/java/org/opensearch/index/engine/Segment.java @@ -44,7 +44,7 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.common.lucene.Lucene; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import java.io.IOException; import java.util.Map; diff --git a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java index dbadd50ebd1a1..1fd7f30237d0b 100644 --- a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java +++ 
b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java @@ -36,7 +36,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/fielddata/FieldDataStats.java b/server/src/main/java/org/opensearch/index/fielddata/FieldDataStats.java index 896039313ea1e..fc7db7d316214 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/FieldDataStats.java +++ b/server/src/main/java/org/opensearch/index/fielddata/FieldDataStats.java @@ -37,7 +37,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java index 36e6a242ecdec..05326b7027cac 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java @@ -50,7 +50,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.util.BigArrays; import org.opensearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldDataService.java b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldDataService.java index 0b370893cd90d..6914f5df31da8 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldDataService.java +++ b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldDataService.java @@ -41,7 +41,7 @@ import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.search.lookup.SearchLookup; diff --git a/server/src/main/java/org/opensearch/index/fielddata/RamAccountingTermsEnum.java b/server/src/main/java/org/opensearch/index/fielddata/RamAccountingTermsEnum.java index cd858278afaa3..517361e75d3ea 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/RamAccountingTermsEnum.java +++ b/server/src/main/java/org/opensearch/index/fielddata/RamAccountingTermsEnum.java @@ -34,7 +34,7 @@ import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import 
org.opensearch.index.fielddata.plain.AbstractIndexOrdinalsFieldData; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java b/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java index e136e649d088a..90ee43d320743 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java +++ b/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java @@ -39,13 +39,13 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.packed.PackedInts; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.fielddata.IndexOrdinalsFieldData; import org.opensearch.index.fielddata.LeafOrdinalsFieldData; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.fielddata.plain.AbstractLeafOrdinalsFieldData; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import java.io.IOException; import java.util.Collection; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractGeoShapeIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractGeoShapeIndexFieldData.java index 2c6aabf04d4ee..d7b8179398920 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractGeoShapeIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractGeoShapeIndexFieldData.java @@ -15,7 +15,7 @@ import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.IndexFieldDataCache; import org.opensearch.index.fielddata.LeafGeoShapeFieldData; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java index 0b3a5e09064ab..15eae77885686 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java @@ -48,7 +48,7 @@ import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; import org.opensearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractLatLonPointIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractLatLonPointIndexFieldData.java index 6e9a13074a445..9cd824f570b05 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractLatLonPointIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/AbstractLatLonPointIndexFieldData.java @@ 
-44,7 +44,7 @@ import org.opensearch.index.fielddata.IndexFieldDataCache; import org.opensearch.index.fielddata.IndexGeoPointFieldData; import org.opensearch.index.fielddata.LeafGeoPointFieldData; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/BinaryIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/BinaryIndexFieldData.java index 3fbc605e35e1d..420864fbd7cb5 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/BinaryIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/BinaryIndexFieldData.java @@ -40,7 +40,7 @@ import org.opensearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.opensearch.index.fielddata.IndexFieldDataCache; import org.opensearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/BytesBinaryIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/BytesBinaryIndexFieldData.java index e1a08344e68c7..5386d0eee2108 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/BytesBinaryIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/BytesBinaryIndexFieldData.java @@ -40,7 +40,7 @@ import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.opensearch.index.fielddata.IndexFieldDataCache; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/ConstantIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/ConstantIndexFieldData.java index 2b08498f2b58b..25d199e9a79af 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/ConstantIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/ConstantIndexFieldData.java @@ -49,7 +49,7 @@ import org.opensearch.index.fielddata.IndexOrdinalsFieldData; import org.opensearch.index.fielddata.LeafOrdinalsFieldData; import org.opensearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java index 8f6a1e46ef417..6f880b678a26a 100644 --- 
a/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -44,7 +44,7 @@ import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; import org.opensearch.common.Nullable; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.util.BigArrays; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; @@ -55,7 +55,7 @@ import org.opensearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.opensearch.index.fielddata.ordinals.Ordinals; import org.opensearch.index.fielddata.ordinals.OrdinalsBuilder; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java index 812010b44b654..e7995f9188275 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -53,7 +53,7 @@ import org.opensearch.index.fielddata.SortedNumericDoubleValues; import org.opensearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; import org.opensearch.index.mapper.DocValueFetcher; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedSetOrdinalsIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedSetOrdinalsIndexFieldData.java index 801186bbc4379..e52eaeb7dfc89 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedSetOrdinalsIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedSetOrdinalsIndexFieldData.java @@ -46,7 +46,7 @@ import org.opensearch.index.fielddata.LeafOrdinalsFieldData; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java index 4b1395ac02bf1..9ac226dcc0b70 100644 --- a/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java @@ -54,7 +54,7 @@ import org.opensearch.index.fielddata.plain.PagedBytesIndexFieldData; import org.opensearch.index.query.QueryShardContext; import 
org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.CoreValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/merge/MergeStats.java b/server/src/main/java/org/opensearch/index/merge/MergeStats.java index fee0ed904b68a..5bdbd0ab30613 100644 --- a/server/src/main/java/org/opensearch/index/merge/MergeStats.java +++ b/server/src/main/java/org/opensearch/index/merge/MergeStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index e09a218ccf83b..bb5088866edb6 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -89,7 +89,7 @@ import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.metrics.MeanMetric; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.concurrent.AbstractRunnable; @@ -174,7 +174,7 @@ import org.opensearch.index.warmer.WarmerStats; import org.opensearch.indices.IndexingMemoryController; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.indices.recovery.PeerRecoveryTargetService; import org.opensearch.indices.recovery.RecoveryFailedException; diff --git a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java index 4f0affb3035ec..5bc3f5f163352 100644 --- a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java @@ -44,8 +44,8 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.index.shard.ShardId; diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index d4e779c83644f..5897fa7d513d7 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java 
@@ -51,7 +51,7 @@ import org.opensearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.opensearch.common.UUIDs; import org.opensearch.common.lucene.Lucene; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.index.Index; diff --git a/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java b/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java index 2d49b153c39f4..0b27afa5e9517 100644 --- a/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java +++ b/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java @@ -38,7 +38,7 @@ import org.opensearch.core.ParseField; import org.opensearch.common.Strings; import org.opensearch.common.lucene.Lucene; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/stats/IndexingPressurePerShardStats.java b/server/src/main/java/org/opensearch/index/stats/IndexingPressurePerShardStats.java index 9e8c8d29c2058..d69ae02fa8e34 100644 --- a/server/src/main/java/org/opensearch/index/stats/IndexingPressurePerShardStats.java +++ b/server/src/main/java/org/opensearch/index/stats/IndexingPressurePerShardStats.java @@ -11,7 +11,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/opensearch/index/stats/IndexingPressureStats.java index 8f4f0b661ed33..dcdea8adbbcde 100644 --- a/server/src/main/java/org/opensearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/opensearch/index/stats/IndexingPressureStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/store/StoreStats.java b/server/src/main/java/org/opensearch/index/store/StoreStats.java index ba36e6b527031..aa73a2c629515 100644 --- a/server/src/main/java/org/opensearch/index/store/StoreStats.java +++ b/server/src/main/java/org/opensearch/index/store/StoreStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import 
org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java index 47b891fdb8d21..45d60c8ad84db 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java +++ b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCache.java @@ -9,8 +9,8 @@ package org.opensearch.index.store.remote.filecache; import org.apache.lucene.store.IndexInput; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.settings.Setting; import org.opensearch.index.store.remote.utils.cache.CacheUsage; import org.opensearch.index.store.remote.utils.cache.RefCountedCache; diff --git a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheFactory.java b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheFactory.java index f23e057196096..9e8b2ee7d1938 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheFactory.java +++ b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheFactory.java @@ -8,7 +8,7 @@ package org.opensearch.index.store.remote.filecache; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.cache.RemovalReason; import org.opensearch.index.store.remote.utils.cache.SegmentedCache; diff --git a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheStats.java b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheStats.java index 46a81adc1ab45..ebb6202c86ec9 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheStats.java +++ b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheStats.java @@ -11,7 +11,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogConfig.java b/server/src/main/java/org/opensearch/index/translog/TranslogConfig.java index 2862accfedc43..e02381c3f97f3 100644 --- a/server/src/main/java/org/opensearch/index/translog/TranslogConfig.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogConfig.java @@ -32,8 +32,8 @@ package org.opensearch.index.translog; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.index.IndexSettings; import org.opensearch.core.index.shard.ShardId; diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogStats.java b/server/src/main/java/org/opensearch/index/translog/TranslogStats.java index c740e29963c4d..935f29a428877 100644 --- 
a/server/src/main/java/org/opensearch/index/translog/TranslogStats.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java index dd85f8f1f77ea..f3108444688f8 100644 --- a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java @@ -44,7 +44,7 @@ import org.opensearch.common.io.Channels; import org.opensearch.common.io.DiskIoBufferPool; import org.opensearch.common.io.stream.ReleasableBytesStreamOutput; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.common.util.io.IOUtils; diff --git a/server/src/main/java/org/opensearch/indices/IndexingMemoryController.java b/server/src/main/java/org/opensearch/indices/IndexingMemoryController.java index 9a87f1c26fa29..fc580d8517706 100644 --- a/server/src/main/java/org/opensearch/indices/IndexingMemoryController.java +++ b/server/src/main/java/org/opensearch/indices/IndexingMemoryController.java @@ -39,8 +39,8 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.index.engine.Engine; diff --git a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java index 6ebed17437074..8f7df5157d673 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java @@ -48,7 +48,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.index.cache.query.QueryCacheStats; import org.opensearch.core.index.shard.ShardId; diff --git a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java index c06880db42587..461747a21f3a7 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java @@ -49,7 +49,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import 
org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ConcurrentCollections; diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index f4841a8ca9a28..36b937e7df76a 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -60,9 +60,9 @@ import org.opensearch.common.CheckedFunction; import org.opensearch.common.CheckedSupplier; import org.opensearch.common.Nullable; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; @@ -72,7 +72,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.concurrent.AbstractRefCounted; @@ -137,7 +137,7 @@ import org.opensearch.index.translog.RemoteBlobStoreInternalTranslogFactory; import org.opensearch.index.translog.TranslogFactory; import org.opensearch.index.translog.TranslogStats; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.mapper.MapperRegistry; diff --git a/server/src/main/java/org/opensearch/indices/breaker/BreakerSettings.java b/server/src/main/java/org/opensearch/indices/breaker/BreakerSettings.java index a421253227dbf..32dfb2c1c7529 100644 --- a/server/src/main/java/org/opensearch/indices/breaker/BreakerSettings.java +++ b/server/src/main/java/org/opensearch/indices/breaker/BreakerSettings.java @@ -32,10 +32,10 @@ package org.opensearch.indices.breaker; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; /** * Settings for a {@link CircuitBreaker} diff --git a/server/src/main/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerService.java index 40bb4894c7397..34cadd2c828d0 100644 --- a/server/src/main/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -37,17 +37,20 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.common.Booleans; import org.opensearch.common.breaker.ChildMemoryCircuitBreaker; -import 
org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ReleasableLock; +import org.opensearch.core.indices.breaker.AllCircuitBreakerStats; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerStats; import org.opensearch.monitor.jvm.GcNames; import org.opensearch.monitor.jvm.JvmInfo; diff --git a/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java b/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java index b3fc070d62e58..8f2b6cfde7a34 100644 --- a/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java +++ b/server/src/main/java/org/opensearch/indices/cluster/IndicesClusterStateService.java @@ -52,7 +52,7 @@ import org.opensearch.cluster.routing.RecoverySource.Type; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.inject.Inject; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; diff --git a/server/src/main/java/org/opensearch/indices/fielddata/cache/IndicesFieldDataCache.java b/server/src/main/java/org/opensearch/indices/fielddata/cache/IndicesFieldDataCache.java index dc18097419904..68364d2c2622a 100644 --- a/server/src/main/java/org/opensearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/server/src/main/java/org/opensearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -51,7 +51,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.lease.Releasable; import org.opensearch.core.index.Index; import org.opensearch.index.fielddata.LeafFieldData; diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java index 66b5b3f8b7535..1ff1844d9e6ac 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoverySourceService.java @@ -46,7 +46,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import 
org.opensearch.common.inject.Inject; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.FutureUtils; diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index a289c8f8a04b7..0ba57a9ee7f65 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -49,7 +49,7 @@ import org.opensearch.common.Nullable; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.util.concurrent.AbstractRunnable; diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverFilesRecoveryException.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverFilesRecoveryException.java index 8d60b44c2c6bd..12e628b9b7e7d 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverFilesRecoveryException.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverFilesRecoveryException.java @@ -36,7 +36,7 @@ import org.opensearch.OpenSearchWrapperException; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.index.shard.ShardId; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java index 60076f1668af8..a4679a2dbae78 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java @@ -40,8 +40,8 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; /** diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java index 0b343fb0b0871..f8ba63def6239 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java @@ -49,7 +49,7 @@ import org.opensearch.common.StopWatch; import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.logging.Loggers; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.util.concurrent.FutureUtils; diff --git a/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java b/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java index 
04bee5586cbab..4f1fb42a421f6 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java @@ -14,7 +14,7 @@ import org.opensearch.action.ActionListenerResponseHandler; import org.opensearch.action.support.RetryableAction; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java index 9d8b30209ab80..8bb2a61e32e2d 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java @@ -19,7 +19,7 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexService; import org.opensearch.index.shard.IndexEventListener; diff --git a/server/src/main/java/org/opensearch/indices/replication/common/ReplicationLuceneIndex.java b/server/src/main/java/org/opensearch/indices/replication/common/ReplicationLuceneIndex.java index 9a9690581ed24..894ddc86f9fde 100644 --- a/server/src/main/java/org/opensearch/indices/replication/common/ReplicationLuceneIndex.java +++ b/server/src/main/java/org/opensearch/indices/replication/common/ReplicationLuceneIndex.java @@ -12,7 +12,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.ToXContentObject; diff --git a/server/src/main/java/org/opensearch/monitor/MonitorService.java b/server/src/main/java/org/opensearch/monitor/MonitorService.java index bed638484f7c0..ad02b18366b98 100644 --- a/server/src/main/java/org/opensearch/monitor/MonitorService.java +++ b/server/src/main/java/org/opensearch/monitor/MonitorService.java @@ -32,7 +32,7 @@ package org.opensearch.monitor; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.Settings; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.store.remote.filecache.FileCache; diff --git a/server/src/main/java/org/opensearch/monitor/fs/FsHealthService.java b/server/src/main/java/org/opensearch/monitor/fs/FsHealthService.java index 85a44aa905e08..d8090ada56b93 100644 --- a/server/src/main/java/org/opensearch/monitor/fs/FsHealthService.java +++ b/server/src/main/java/org/opensearch/monitor/fs/FsHealthService.java @@ -38,7 +38,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.UUIDs; -import 
org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; diff --git a/server/src/main/java/org/opensearch/monitor/fs/FsInfo.java b/server/src/main/java/org/opensearch/monitor/fs/FsInfo.java index cfa29436f24a5..114702ff0d351 100644 --- a/server/src/main/java/org/opensearch/monitor/fs/FsInfo.java +++ b/server/src/main/java/org/opensearch/monitor/fs/FsInfo.java @@ -37,7 +37,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/monitor/fs/FsProbe.java b/server/src/main/java/org/opensearch/monitor/fs/FsProbe.java index 50d1d981f3c98..e20d84cd9763e 100644 --- a/server/src/main/java/org/opensearch/monitor/fs/FsProbe.java +++ b/server/src/main/java/org/opensearch/monitor/fs/FsProbe.java @@ -39,7 +39,7 @@ import org.opensearch.common.SuppressForbidden; import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.PathUtils; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.env.NodeEnvironment; import org.opensearch.env.NodeEnvironment.NodePath; import org.opensearch.index.store.remote.filecache.FileCache; diff --git a/server/src/main/java/org/opensearch/monitor/jvm/JvmGcMonitorService.java b/server/src/main/java/org/opensearch/monitor/jvm/JvmGcMonitorService.java index 7bdc74d9ae574..78379db6e2cff 100644 --- a/server/src/main/java/org/opensearch/monitor/jvm/JvmGcMonitorService.java +++ b/server/src/main/java/org/opensearch/monitor/jvm/JvmGcMonitorService.java @@ -34,11 +34,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.monitor.jvm.JvmStats.GarbageCollector; import org.opensearch.threadpool.Scheduler.Cancellable; diff --git a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java index 4ce7013e1052f..9eafd80203a87 100644 --- a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java +++ b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java @@ -39,7 +39,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.node.ReportingService; diff --git 
a/server/src/main/java/org/opensearch/monitor/jvm/JvmStats.java b/server/src/main/java/org/opensearch/monitor/jvm/JvmStats.java index 5451821a565cd..b429de614f250 100644 --- a/server/src/main/java/org/opensearch/monitor/jvm/JvmStats.java +++ b/server/src/main/java/org/opensearch/monitor/jvm/JvmStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/monitor/os/OsStats.java b/server/src/main/java/org/opensearch/monitor/os/OsStats.java index c684bf10c4a03..697b86c6ba55c 100644 --- a/server/src/main/java/org/opensearch/monitor/os/OsStats.java +++ b/server/src/main/java/org/opensearch/monitor/os/OsStats.java @@ -37,7 +37,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/monitor/process/ProcessStats.java b/server/src/main/java/org/opensearch/monitor/process/ProcessStats.java index de49c07a5600f..9ab38b41a4648 100644 --- a/server/src/main/java/org/opensearch/monitor/process/ProcessStats.java +++ b/server/src/main/java/org/opensearch/monitor/process/ProcessStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 0bc824c5a0704..d8b51b0184a66 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -38,8 +38,8 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.common.SetOnce; import org.opensearch.common.settings.SettingsException; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.FeatureFlags; import org.opensearch.cluster.routing.allocation.AwarenessReplicaBalance; import org.opensearch.index.IndexModule; @@ -112,9 +112,9 @@ import org.opensearch.cluster.routing.allocation.DiskThresholdMonitor; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.StopWatch; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.component.Lifecycle; -import org.opensearch.common.component.LifecycleComponent; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.common.lifecycle.Lifecycle; +import 
org.opensearch.common.lifecycle.LifecycleComponent; import org.opensearch.common.inject.Injector; import org.opensearch.common.inject.Key; import org.opensearch.common.inject.Module; @@ -164,9 +164,9 @@ import org.opensearch.indices.SystemIndices; import org.opensearch.indices.analysis.AnalysisModule; import org.opensearch.indices.breaker.BreakerSettings; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.indices.recovery.PeerRecoverySourceService; import org.opensearch.indices.recovery.PeerRecoveryTargetService; diff --git a/server/src/main/java/org/opensearch/node/NodeService.java b/server/src/main/java/org/opensearch/node/NodeService.java index 6f4fe1e083ad7..95b3825cd992d 100644 --- a/server/src/main/java/org/opensearch/node/NodeService.java +++ b/server/src/main/java/org/opensearch/node/NodeService.java @@ -49,7 +49,7 @@ import org.opensearch.index.IndexingPressureService; import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.ingest.IngestService; import org.opensearch.monitor.MonitorService; import org.opensearch.plugins.PluginsService; diff --git a/server/src/main/java/org/opensearch/plugins/CircuitBreakerPlugin.java b/server/src/main/java/org/opensearch/plugins/CircuitBreakerPlugin.java index f52aab5b238b3..ab21d33700d54 100644 --- a/server/src/main/java/org/opensearch/plugins/CircuitBreakerPlugin.java +++ b/server/src/main/java/org/opensearch/plugins/CircuitBreakerPlugin.java @@ -32,10 +32,10 @@ package org.opensearch.plugins; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.indices.breaker.BreakerSettings; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; /** * An extension point for {@link Plugin} implementations to add custom circuit breakers diff --git a/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java b/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java index 8ea8333f4851a..fac29b39ec4c5 100644 --- a/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java +++ b/server/src/main/java/org/opensearch/plugins/NetworkPlugin.java @@ -45,7 +45,7 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.http.HttpServerTransport; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportInterceptor; diff --git a/server/src/main/java/org/opensearch/plugins/Plugin.java b/server/src/main/java/org/opensearch/plugins/Plugin.java index 02a46f44ef23b..e8c479a48d82b 100644 --- a/server/src/main/java/org/opensearch/plugins/Plugin.java +++ b/server/src/main/java/org/opensearch/plugins/Plugin.java @@ -41,7 
+41,7 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.node.DiscoveryNodeRole; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.component.LifecycleComponent; +import org.opensearch.common.lifecycle.LifecycleComponent; import org.opensearch.common.inject.Module; import org.opensearch.core.common.io.stream.NamedWriteable; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; diff --git a/server/src/main/java/org/opensearch/plugins/PluginsService.java b/server/src/main/java/org/opensearch/plugins/PluginsService.java index 3cd226c357367..67ce664c629ec 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginsService.java +++ b/server/src/main/java/org/opensearch/plugins/PluginsService.java @@ -44,7 +44,7 @@ import org.opensearch.action.admin.cluster.node.info.PluginsAndModules; import org.opensearch.bootstrap.JarHell; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.component.LifecycleComponent; +import org.opensearch.common.lifecycle.LifecycleComponent; import org.opensearch.common.inject.Module; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; diff --git a/server/src/main/java/org/opensearch/repositories/FilterRepository.java b/server/src/main/java/org/opensearch/repositories/FilterRepository.java index 462d105dc0c68..52e7f374507d4 100644 --- a/server/src/main/java/org/opensearch/repositories/FilterRepository.java +++ b/server/src/main/java/org/opensearch/repositories/FilterRepository.java @@ -40,8 +40,8 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.component.Lifecycle; -import org.opensearch.common.component.LifecycleListener; +import org.opensearch.common.lifecycle.Lifecycle; +import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.index.mapper.MapperService; import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.snapshots.IndexShardSnapshotStatus; diff --git a/server/src/main/java/org/opensearch/repositories/RepositoriesService.java b/server/src/main/java/org/opensearch/repositories/RepositoriesService.java index e7f7a1d9c0554..e66f8ddee5678 100644 --- a/server/src/main/java/org/opensearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/opensearch/repositories/RepositoriesService.java @@ -58,7 +58,7 @@ import org.opensearch.cluster.service.ClusterManagerTaskThrottler; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Strings; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; diff --git a/server/src/main/java/org/opensearch/repositories/Repository.java b/server/src/main/java/org/opensearch/repositories/Repository.java index 8a712b0a0c098..683177b3c3639 100644 --- a/server/src/main/java/org/opensearch/repositories/Repository.java +++ b/server/src/main/java/org/opensearch/repositories/Repository.java @@ -42,7 +42,7 @@ import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.Nullable; -import org.opensearch.common.component.LifecycleComponent; +import 
org.opensearch.common.lifecycle.LifecycleComponent; import org.opensearch.index.mapper.MapperService; import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.snapshots.IndexShardSnapshotStatus; diff --git a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java index ae8080da73fa8..70db2e0c0a9bd 100644 --- a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java @@ -78,7 +78,7 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.compress.Compressor; import org.opensearch.common.compress.CompressorFactory; import org.opensearch.common.compress.CompressorType; @@ -89,8 +89,8 @@ import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; diff --git a/server/src/main/java/org/opensearch/repositories/fs/FsRepository.java b/server/src/main/java/org/opensearch/repositories/fs/FsRepository.java index 0b9989ff64d9c..6971d8ec00ecf 100644 --- a/server/src/main/java/org/opensearch/repositories/fs/FsRepository.java +++ b/server/src/main/java/org/opensearch/repositories/fs/FsRepository.java @@ -41,7 +41,7 @@ import org.opensearch.common.blobstore.fs.FsBlobStore; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; diff --git a/server/src/main/java/org/opensearch/rest/RestController.java b/server/src/main/java/org/opensearch/rest/RestController.java index a2938810db1c2..1a8bddc094d26 100644 --- a/server/src/main/java/org/opensearch/rest/RestController.java +++ b/server/src/main/java/org/opensearch/rest/RestController.java @@ -38,7 +38,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Nullable; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -57,7 +57,7 @@ import org.opensearch.identity.Subject; import org.opensearch.identity.tokens.AuthToken; import org.opensearch.identity.tokens.RestTokenExtractor; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.usage.UsageService; import java.io.ByteArrayOutputStream; diff --git 
a/server/src/main/java/org/opensearch/rest/RestRequest.java b/server/src/main/java/org/opensearch/rest/RestRequest.java index 7382701b6f787..fb497a17cde72 100644 --- a/server/src/main/java/org/opensearch/rest/RestRequest.java +++ b/server/src/main/java/org/opensearch/rest/RestRequest.java @@ -41,7 +41,7 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -65,7 +65,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -import static org.opensearch.common.unit.ByteSizeValue.parseBytesSizeValue; +import static org.opensearch.core.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.opensearch.common.unit.TimeValue.parseTimeValue; /** diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java index 702ffff1c9330..176e37abc0f65 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestAllocationAction.java @@ -43,7 +43,7 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.Table; import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestResponse; diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationAction.java index 52890274d4198..2dfed426f35d6 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationAction.java @@ -14,7 +14,7 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Table; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentOpenSearchExtension; import org.opensearch.core.common.Strings; diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestFielddataAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestFielddataAction.java index a04bac0c30bc9..04bbdeeadc4c4 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestFielddataAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestFielddataAction.java @@ -37,7 +37,7 @@ import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Table; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestResponse; import org.opensearch.rest.action.RestResponseListener; diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java 
b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java index 6346e5d23cd34..b54c8955283a2 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java @@ -49,7 +49,7 @@ import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.http.HttpInfo; import org.opensearch.index.cache.query.QueryCacheStats; import org.opensearch.index.cache.request.RequestCacheStats; diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestTable.java b/server/src/main/java/org/opensearch/rest/action/cat/RestTable.java index 6a5d4e40eb452..ec44b06318695 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestTable.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestTable.java @@ -38,7 +38,7 @@ import org.opensearch.common.io.UTF8StreamWriter; import org.opensearch.core.common.io.stream.BytesStream; import org.opensearch.common.regex.Regex; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.SizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.Strings; diff --git a/server/src/main/java/org/opensearch/script/ScriptCache.java b/server/src/main/java/org/opensearch/script/ScriptCache.java index 874888b53b978..439953f816cbb 100644 --- a/server/src/main/java/org/opensearch/script/ScriptCache.java +++ b/server/src/main/java/org/opensearch/script/ScriptCache.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.cache.Cache; import org.opensearch.common.cache.CacheBuilder; import org.opensearch.common.cache.RemovalListener; diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 0259731992f2d..b244290e8ae74 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -53,8 +53,8 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.CheckedSupplier; import org.opensearch.common.UUIDs; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.lucene.Lucene; @@ -87,7 +87,7 @@ import org.opensearch.index.shard.SearchOperationListener; import org.opensearch.core.index.shard.ShardId; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; import 
org.opensearch.node.ResponseCollectorService; import org.opensearch.script.FieldScript; diff --git a/server/src/main/java/org/opensearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/opensearch/search/aggregations/AggregatorBase.java index 1d315980512b4..392c65ce27aea 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/opensearch/search/aggregations/AggregatorBase.java @@ -34,9 +34,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ScoreMode; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.SearchShardTarget; import org.opensearch.search.aggregations.support.ValuesSourceConfig; import org.opensearch.search.internal.SearchContext; diff --git a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java index 83a94afbe8e02..f1416fddebfa2 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java +++ b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java @@ -32,7 +32,7 @@ package org.opensearch.search.aggregations; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Setting; diff --git a/server/src/main/java/org/opensearch/search/backpressure/SearchBackpressureService.java b/server/src/main/java/org/opensearch/search/backpressure/SearchBackpressureService.java index 246078e7a8eda..4f6c2c327509d 100644 --- a/server/src/main/java/org/opensearch/search/backpressure/SearchBackpressureService.java +++ b/server/src/main/java/org/opensearch/search/backpressure/SearchBackpressureService.java @@ -13,7 +13,7 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.action.search.SearchShardTask; import org.opensearch.action.search.SearchTask; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.monitor.jvm.JvmStats; diff --git a/server/src/main/java/org/opensearch/search/backpressure/trackers/HeapUsageTracker.java b/server/src/main/java/org/opensearch/search/backpressure/trackers/HeapUsageTracker.java index fbd2155c0ef89..ed2ae0bde86c3 100644 --- a/server/src/main/java/org/opensearch/search/backpressure/trackers/HeapUsageTracker.java +++ b/server/src/main/java/org/opensearch/search/backpressure/trackers/HeapUsageTracker.java @@ -14,7 +14,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Setting; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import 
org.opensearch.common.util.MovingAverage; import org.opensearch.monitor.jvm.JvmStats; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionStats.java b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionStats.java index 2ca642292a7c0..b54f988218ac8 100644 --- a/server/src/main/java/org/opensearch/search/suggest/completion/CompletionStats.java +++ b/server/src/main/java/org/opensearch/search/suggest/completion/CompletionStats.java @@ -36,7 +36,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java index 0fcd3761c98ba..d8f9b8e1b984f 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java @@ -47,7 +47,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java b/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java index cd9c2c4ce325a..2772f061b4c4a 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotsService.java @@ -82,7 +82,7 @@ import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; diff --git a/server/src/main/java/org/opensearch/tasks/TaskCancellationMonitoringService.java b/server/src/main/java/org/opensearch/tasks/TaskCancellationMonitoringService.java index 5b512af56e195..343d4571593a7 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskCancellationMonitoringService.java +++ b/server/src/main/java/org/opensearch/tasks/TaskCancellationMonitoringService.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.search.SearchShardTask; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.metrics.CounterMetric; import org.opensearch.threadpool.Scheduler; import org.opensearch.threadpool.ThreadPool; diff --git a/server/src/main/java/org/opensearch/tasks/TaskManager.java b/server/src/main/java/org/opensearch/tasks/TaskManager.java index bcd3004188bb6..ec59a13dd1999 100644 --- 
a/server/src/main/java/org/opensearch/tasks/TaskManager.java +++ b/server/src/main/java/org/opensearch/tasks/TaskManager.java @@ -50,7 +50,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; diff --git a/server/src/main/java/org/opensearch/transport/InboundAggregator.java b/server/src/main/java/org/opensearch/transport/InboundAggregator.java index 60d45c8b3ad2b..060ccec3563e2 100644 --- a/server/src/main/java/org/opensearch/transport/InboundAggregator.java +++ b/server/src/main/java/org/opensearch/transport/InboundAggregator.java @@ -32,8 +32,8 @@ package org.opensearch.transport; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.bytes.CompositeBytesReference; diff --git a/server/src/main/java/org/opensearch/transport/InboundPipeline.java b/server/src/main/java/org/opensearch/transport/InboundPipeline.java index 4a4f928860819..9103692e358e4 100644 --- a/server/src/main/java/org/opensearch/transport/InboundPipeline.java +++ b/server/src/main/java/org/opensearch/transport/InboundPipeline.java @@ -33,7 +33,7 @@ package org.opensearch.transport; import org.opensearch.Version; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.bytes.CompositeBytesReference; import org.opensearch.common.bytes.ReleasableBytesReference; import org.opensearch.common.util.PageCacheRecycler; diff --git a/server/src/main/java/org/opensearch/transport/TcpTransport.java b/server/src/main/java/org/opensearch/transport/TcpTransport.java index 75676307f4026..8733cb5fe7e8e 100644 --- a/server/src/main/java/org/opensearch/transport/TcpTransport.java +++ b/server/src/main/java/org/opensearch/transport/TcpTransport.java @@ -40,11 +40,11 @@ import org.opensearch.action.support.ThreadedActionListener; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.Booleans; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.component.AbstractLifecycleComponent; -import org.opensearch.common.component.Lifecycle; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.metrics.MeanMetric; @@ -57,14 +57,14 @@ import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.PortsRange; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import 
org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.concurrent.CountDown; import org.opensearch.core.common.Strings; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.monitor.jvm.JvmInfo; import org.opensearch.node.Node; import org.opensearch.core.rest.RestStatus; diff --git a/server/src/main/java/org/opensearch/transport/Transport.java b/server/src/main/java/org/opensearch/transport/Transport.java index fbf6b3ef350d8..3bf855f847685 100644 --- a/server/src/main/java/org/opensearch/transport/Transport.java +++ b/server/src/main/java/org/opensearch/transport/Transport.java @@ -36,7 +36,7 @@ import org.opensearch.action.ActionListener; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.collect.MapBuilder; -import org.opensearch.common.component.LifecycleComponent; +import org.opensearch.common.lifecycle.LifecycleComponent; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; diff --git a/server/src/main/java/org/opensearch/transport/TransportKeepAlive.java b/server/src/main/java/org/opensearch/transport/TransportKeepAlive.java index e48267d943739..5b542e4080641 100644 --- a/server/src/main/java/org/opensearch/transport/TransportKeepAlive.java +++ b/server/src/main/java/org/opensearch/transport/TransportKeepAlive.java @@ -37,7 +37,7 @@ import org.opensearch.action.ActionListener; import org.opensearch.common.AsyncBiFunction; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.component.Lifecycle; +import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.unit.TimeValue; diff --git a/server/src/main/java/org/opensearch/transport/TransportService.java b/server/src/main/java/org/opensearch/transport/TransportService.java index 25293bf97b222..3b59c99c03d3b 100644 --- a/server/src/main/java/org/opensearch/transport/TransportService.java +++ b/server/src/main/java/org/opensearch/transport/TransportService.java @@ -43,7 +43,7 @@ import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.Nullable; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Streamables; diff --git a/server/src/main/java/org/opensearch/transport/TransportSettings.java b/server/src/main/java/org/opensearch/transport/TransportSettings.java index b4d72ab40409b..05887f6f6d131 100644 --- a/server/src/main/java/org/opensearch/transport/TransportSettings.java +++ b/server/src/main/java/org/opensearch/transport/TransportSettings.java @@ -35,7 +35,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import 
java.util.Arrays; diff --git a/server/src/main/java/org/opensearch/transport/TransportStats.java b/server/src/main/java/org/opensearch/transport/TransportStats.java index 29544754a5729..e3c4773f4a472 100644 --- a/server/src/main/java/org/opensearch/transport/TransportStats.java +++ b/server/src/main/java/org/opensearch/transport/TransportStats.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java index 2656810af0379..629fe9ebd4b99 100644 --- a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java @@ -64,8 +64,8 @@ import org.opensearch.core.common.ParsingException; import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.PathUtils; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -73,7 +73,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.CancellableThreadsTests; import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.XContentType; @@ -852,7 +852,7 @@ public void testIds() { ids.put(130, org.opensearch.action.NoShardAvailableActionException.class); ids.put(131, org.opensearch.action.UnavailableShardsException.class); ids.put(132, org.opensearch.index.engine.FlushFailedEngineException.class); - ids.put(133, org.opensearch.common.breaker.CircuitBreakingException.class); + ids.put(133, org.opensearch.core.common.breaker.CircuitBreakingException.class); ids.put(134, org.opensearch.transport.NodeNotConnectedException.class); ids.put(135, org.opensearch.index.mapper.StrictDynamicMappingException.class); ids.put(136, org.opensearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class); diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java index cbf7032b50ca5..32f27f3a60e42 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -42,8 +42,8 @@ import org.opensearch.cluster.coordination.PendingClusterStateStats; import org.opensearch.cluster.coordination.PublishClusterStateStats; import org.opensearch.http.HttpStats; -import org.opensearch.indices.breaker.AllCircuitBreakerStats; -import org.opensearch.indices.breaker.CircuitBreakerStats; +import 
org.opensearch.core.indices.breaker.AllCircuitBreakerStats; +import org.opensearch.core.indices.breaker.CircuitBreakerStats; import org.opensearch.ingest.IngestStats; import org.opensearch.monitor.fs.FsInfo; import org.opensearch.monitor.jvm.JvmStats; diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index abbaf3a3f7b96..e0676cc9ddbdd 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -56,7 +56,7 @@ import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.lease.Releasable; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.tasks.TaskCancellationService; import org.opensearch.tasks.TaskManager; import org.opensearch.tasks.TaskResourceTrackingService; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/cache/clear/TransportClearIndicesCacheActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/cache/clear/TransportClearIndicesCacheActionTests.java index 030ee6649bba5..f370528e6f770 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/cache/clear/TransportClearIndicesCacheActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/cache/clear/TransportClearIndicesCacheActionTests.java @@ -17,7 +17,7 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsResponseTests.java index 5e4e2346a6e5b..c83263e4e22c6 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/datastream/DataStreamsStatsResponseTests.java @@ -35,7 +35,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.test.AbstractWireSerializingTestCase; import java.util.ArrayList; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/ConditionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/ConditionTests.java index db61cac534f89..b4512595a1dfe 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/ConditionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/ConditionTests.java @@ -32,8 +32,8 @@ package org.opensearch.action.admin.indices.rollover; -import org.opensearch.common.unit.ByteSizeUnit; -import 
org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.EqualsHashCodeTestUtils; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java index 9de9e81ee9cae..f65d39583336b 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -40,8 +40,8 @@ import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverResponseTests.java index 233e6166d1caf..2406952af6a3b 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverResponseTests.java @@ -33,7 +33,7 @@ package org.opensearch.action.admin.indices.rollover; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.test.AbstractSerializingTestCase; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 65d7c0fc62629..76ddb9facd192 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -59,8 +59,8 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.UUIDs; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.set.Sets; import org.opensearch.index.cache.query.QueryCacheStats; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/shrink/ResizeRequestTests.java index 3d21af584ea04..05922a6c7419b 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/shrink/ResizeRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/shrink/ResizeRequestTests.java @@ -39,7 +39,7
@@ import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.RandomCreateIndexGenerator; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java index ef49820192e9b..848df5f8e4979 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java @@ -52,7 +52,7 @@ import org.opensearch.cluster.routing.allocation.decider.AllocationDeciders; import org.opensearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.index.shard.DocsStats; import org.opensearch.index.store.StoreStats; import org.opensearch.snapshots.EmptySnapshotsInfoService; diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkProcessorTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkProcessorTests.java index 40d0df61cbb9f..0111c8d6e3132 100644 --- a/server/src/test/java/org/opensearch/action/bulk/BulkProcessorTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkProcessorTests.java @@ -38,8 +38,8 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.index.IndexRequest; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java b/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java index bda9a49bf797b..c4d3b17de1a5a 100644 --- a/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/index/IndexRequestTests.java @@ -39,7 +39,7 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.VersionType; import org.opensearch.index.seqno.SequenceNumbers; diff --git a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java index 61dd2ac8c14ae..2e50c6225577d 100644 --- a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java +++ b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java @@ -64,7 +64,7 @@ import org.opensearch.index.translog.Translog; import 
org.opensearch.indices.IndicesService; import org.opensearch.indices.SystemIndices; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.transport.MockTransportService; diff --git a/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java index db371eb9467ba..afb342967bd69 100644 --- a/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java @@ -38,8 +38,8 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.opensearch.action.OriginalIndices; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.util.concurrent.AtomicArray; import org.opensearch.common.util.concurrent.OpenSearchExecutors; diff --git a/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java index 24a018d05a16a..bf18c22f9b7ee 100644 --- a/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java @@ -37,8 +37,8 @@ import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.opensearch.action.OriginalIndices; import org.opensearch.common.UUIDs; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.index.shard.ShardId; diff --git a/server/src/test/java/org/opensearch/action/search/QueryPhaseResultConsumerTests.java b/server/src/test/java/org/opensearch/action/search/QueryPhaseResultConsumerTests.java index 27d04cba204fb..794554c1ec013 100644 --- a/server/src/test/java/org/opensearch/action/search/QueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/opensearch/action/search/QueryPhaseResultConsumerTests.java @@ -36,8 +36,8 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.opensearch.action.OriginalIndices; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.concurrent.OpenSearchExecutors; diff --git a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java index 93f9f69e68de0..dc946c7963a17 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java +++ 
b/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java @@ -46,9 +46,9 @@ import org.apache.lucene.util.BytesRef; import org.opensearch.action.OriginalIndices; import org.opensearch.common.UUIDs; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; diff --git a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 1b67d805fe814..d0a1307d33235 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -41,8 +41,8 @@ import org.opensearch.action.OriginalIndices; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.routing.GroupShardsIterator; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.OpenSearchExecutors; diff --git a/server/src/test/java/org/opensearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/opensearch/action/support/replication/BroadcastReplicationTests.java index 330d43e791555..9c02bacbadda2 100644 --- a/server/src/test/java/org/opensearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/opensearch/action/support/replication/BroadcastReplicationTests.java @@ -58,8 +58,8 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.io.IOUtils; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.indices.breaker.CircuitBreakerService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.core.rest.RestStatus; import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java b/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java index 78081a8f83ce3..21d14fb31059d 100644 --- a/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java +++ b/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java @@ -49,8 +49,8 @@ import org.opensearch.cluster.routing.IndexShardRoutingTable; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingState; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import 
org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ConcurrentCollections; diff --git a/server/src/test/java/org/opensearch/action/support/replication/ReplicationResponseTests.java b/server/src/test/java/org/opensearch/action/support/replication/ReplicationResponseTests.java index 5e32193415bd8..46af25c49f262 100644 --- a/server/src/test/java/org/opensearch/action/support/replication/ReplicationResponseTests.java +++ b/server/src/test/java/org/opensearch/action/support/replication/ReplicationResponseTests.java @@ -35,8 +35,8 @@ import org.opensearch.OpenSearchException; import org.opensearch.action.support.replication.ReplicationResponse.ShardInfo; import org.opensearch.common.Strings; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; import org.opensearch.core.xcontent.ToXContent; diff --git a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java index 089bcf77afbae..5af358cae18a8 100644 --- a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java +++ b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java @@ -89,7 +89,7 @@ import org.opensearch.index.shard.ShardNotInPrimaryModeException; import org.opensearch.indices.IndexClosedException; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.indices.cluster.ClusterStateChanges; import org.opensearch.core.rest.RestStatus; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java b/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java index 511c1555f1159..a284269dc4151 100644 --- a/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java @@ -45,8 +45,8 @@ import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.common.CheckedRunnable; import org.opensearch.common.UUIDs; -import org.opensearch.common.component.Lifecycle; -import org.opensearch.common.component.LifecycleListener; +import org.opensearch.common.lifecycle.Lifecycle; +import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; diff --git a/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java index ea615c7cfc09a..f0459240c2bbe 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/IndexMetadataTests.java @@ -43,7 +43,7 @@ 
import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettingsTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettingsTests.java index d23b079e35ef9..22f7b9ecd5206 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettingsTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/DiskThresholdSettingsTests.java @@ -34,7 +34,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.test.OpenSearchTestCase; import java.util.Locale; diff --git a/server/src/test/java/org/opensearch/common/UUIDTests.java b/server/src/test/java/org/opensearch/common/UUIDTests.java index a71b68bfe859e..02595d19781ab 100644 --- a/server/src/test/java/org/opensearch/common/UUIDTests.java +++ b/server/src/test/java/org/opensearch/common/UUIDTests.java @@ -45,7 +45,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.lucene.Lucene; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/opensearch/common/bytes/RecyclingBytesStreamOutputTests.java b/server/src/test/java/org/opensearch/common/bytes/RecyclingBytesStreamOutputTests.java index 920684f06f83c..ede5658f0decd 100644 --- a/server/src/test/java/org/opensearch/common/bytes/RecyclingBytesStreamOutputTests.java +++ b/server/src/test/java/org/opensearch/common/bytes/RecyclingBytesStreamOutputTests.java @@ -36,7 +36,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/opensearch/common/io/stream/ReleasableBytesStreamOutputTests.java b/server/src/test/java/org/opensearch/common/io/stream/ReleasableBytesStreamOutputTests.java index 20f202149b17d..92ed8fac5aae0 100644 --- a/server/src/test/java/org/opensearch/common/io/stream/ReleasableBytesStreamOutputTests.java +++ b/server/src/test/java/org/opensearch/common/io/stream/ReleasableBytesStreamOutputTests.java @@ -35,7 +35,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java 
b/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java index 48a8e5799993d..f795df2f48b22 100644 --- a/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java @@ -32,7 +32,7 @@ package org.opensearch.common.network; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; @@ -45,7 +45,7 @@ import org.opensearch.http.HttpServerTransport; import org.opensearch.http.HttpStats; import org.opensearch.http.NullDispatcher; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.plugins.NetworkPlugin; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; diff --git a/server/src/test/java/org/opensearch/common/settings/MemorySizeSettingsTests.java b/server/src/test/java/org/opensearch/common/settings/MemorySizeSettingsTests.java index 2c7251818e2bc..94fb6b010309d 100644 --- a/server/src/test/java/org/opensearch/common/settings/MemorySizeSettingsTests.java +++ b/server/src/test/java/org/opensearch/common/settings/MemorySizeSettingsTests.java @@ -33,7 +33,7 @@ package org.opensearch.common.settings; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.indices.IndexingMemoryController; import org.opensearch.indices.IndicesQueryCache; diff --git a/server/src/test/java/org/opensearch/common/settings/SettingTests.java b/server/src/test/java/org/opensearch/common/settings/SettingTests.java index 3d5a5090cdc82..27f9c027e2641 100644 --- a/server/src/test/java/org/opensearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/opensearch/common/settings/SettingTests.java @@ -51,8 +51,8 @@ import org.opensearch.common.settings.Setting.MinTimeValueParser; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Setting.RegexValidator; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; import org.opensearch.monitor.jvm.JvmInfo; diff --git a/server/src/test/java/org/opensearch/common/settings/SettingsTests.java b/server/src/test/java/org/opensearch/common/settings/SettingsTests.java index 0c6352d118be2..1fd8658a09e67 100644 --- a/server/src/test/java/org/opensearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/opensearch/common/settings/SettingsTests.java @@ -38,8 +38,8 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.core.common.settings.SecureString; import org.opensearch.core.xcontent.ToXContent; diff --git a/server/src/test/java/org/opensearch/common/settings/WriteableSettingTests.java b/server/src/test/java/org/opensearch/common/settings/WriteableSettingTests.java index 804a53d687c95..33cfcfcd79d87 100644 --- a/server/src/test/java/org/opensearch/common/settings/WriteableSettingTests.java +++ b/server/src/test/java/org/opensearch/common/settings/WriteableSettingTests.java @@ -13,8 +13,8 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.BytesStreamInput; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/common/util/BigArraysTests.java b/server/src/test/java/org/opensearch/common/util/BigArraysTests.java index c973a76623452..22aff6c55d39a 100644 --- a/server/src/test/java/org/opensearch/common/util/BigArraysTests.java +++ b/server/src/test/java/org/opensearch/common/util/BigArraysTests.java @@ -33,15 +33,15 @@ package org.opensearch.common.util; import org.apache.lucene.util.BytesRef; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.common.util.BigArray; import org.opensearch.core.common.util.ByteArray; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.junit.Before; diff --git a/server/src/test/java/org/opensearch/common/util/BinarySearcherTests.java b/server/src/test/java/org/opensearch/common/util/BinarySearcherTests.java index bc64a4980947c..98837091d9550 100644 --- a/server/src/test/java/org/opensearch/common/util/BinarySearcherTests.java +++ b/server/src/test/java/org/opensearch/common/util/BinarySearcherTests.java @@ -34,7 +34,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.lease.Releasables; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.junit.Before; diff --git a/server/src/test/java/org/opensearch/common/util/BitArrayTests.java b/server/src/test/java/org/opensearch/common/util/BitArrayTests.java index 16e89b81c695c..e88e2b20c6155 100644 --- a/server/src/test/java/org/opensearch/common/util/BitArrayTests.java +++ b/server/src/test/java/org/opensearch/common/util/BitArrayTests.java @@ -32,12 +32,12 @@ package org.opensearch.common.util; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; -import org.opensearch.common.unit.ByteSizeUnit; -import 
org.opensearch.common.unit.ByteSizeValue; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.util.ArrayList; diff --git a/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java b/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java index 8b719283ed71d..905fbfe4e244b 100644 --- a/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java +++ b/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.settings.Settings; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.util.HashMap; diff --git a/server/src/test/java/org/opensearch/common/util/LongHashTests.java b/server/src/test/java/org/opensearch/common/util/LongHashTests.java index 295497b9f188c..3716c7ad277b3 100644 --- a/server/src/test/java/org/opensearch/common/util/LongHashTests.java +++ b/server/src/test/java/org/opensearch/common/util/LongHashTests.java @@ -33,7 +33,7 @@ package org.opensearch.common.util; import org.opensearch.common.settings.Settings; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.util.HashMap; diff --git a/server/src/test/java/org/opensearch/common/util/LongLongHashTests.java b/server/src/test/java/org/opensearch/common/util/LongLongHashTests.java index fd0f1b5bad4a7..e0b0182216c27 100644 --- a/server/src/test/java/org/opensearch/common/util/LongLongHashTests.java +++ b/server/src/test/java/org/opensearch/common/util/LongLongHashTests.java @@ -33,7 +33,7 @@ package org.opensearch.common.util; import org.opensearch.common.settings.Settings; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.util.ArrayList; diff --git a/server/src/test/java/org/opensearch/common/util/LongObjectHashMapTests.java b/server/src/test/java/org/opensearch/common/util/LongObjectHashMapTests.java index 73a6fc5d54883..dc4743189f4cf 100644 --- a/server/src/test/java/org/opensearch/common/util/LongObjectHashMapTests.java +++ b/server/src/test/java/org/opensearch/common/util/LongObjectHashMapTests.java @@ -33,7 +33,7 @@ package org.opensearch.common.util; import org.opensearch.common.settings.Settings; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.util.HashMap; diff --git a/server/src/test/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnableTests.java b/server/src/test/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnableTests.java index d771fdbef38d6..20e8916565daf 100644 --- 
a/server/src/test/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnableTests.java +++ b/server/src/test/java/org/opensearch/common/util/concurrent/AbstractLifecycleRunnableTests.java @@ -33,7 +33,7 @@ import org.apache.logging.log4j.Logger; import org.opensearch.common.SuppressLoggerChecks; -import org.opensearch.common.component.Lifecycle; +import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.test.OpenSearchTestCase; import org.mockito.InOrder; diff --git a/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java b/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java index 61aac8247647a..acf94483c8116 100644 --- a/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java +++ b/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java @@ -41,7 +41,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.transport.MockTransportService; import org.opensearch.threadpool.TestThreadPool; diff --git a/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java b/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java index 95f84a70986e1..07491cc0d6435 100644 --- a/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java +++ b/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java @@ -47,7 +47,7 @@ import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.concurrent.FutureUtils; import org.opensearch.common.util.io.IOUtils; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; diff --git a/server/src/test/java/org/opensearch/env/NodeRepurposeCommandTests.java b/server/src/test/java/org/opensearch/env/NodeRepurposeCommandTests.java index 382698a300591..a55d89ed37cd7 100644 --- a/server/src/test/java/org/opensearch/env/NodeRepurposeCommandTests.java +++ b/server/src/test/java/org/opensearch/env/NodeRepurposeCommandTests.java @@ -46,8 +46,8 @@ import org.opensearch.common.CheckedRunnable; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.gateway.PersistedClusterStateService; import org.opensearch.core.index.Index; diff --git a/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java b/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java index 41b841aec7be8..9f1050351c7b5 100644 --- a/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java +++ b/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java @@ -66,7 +66,7 @@ import org.opensearch.extensions.settings.RegisterCustomSettingsRequest; import 
org.opensearch.extensions.ExtensionsSettings.Extension; import org.opensearch.identity.IdentityService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.plugins.ExtensionAwarePlugin; import org.opensearch.rest.RestController; import org.opensearch.test.FeatureFlagSetter; diff --git a/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java b/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java index e46e05cd2dbec..bc216bd3bbbb8 100644 --- a/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java +++ b/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java @@ -25,7 +25,7 @@ import org.opensearch.extensions.DiscoveryExtensionNode; import org.opensearch.extensions.AcknowledgedResponse; import org.opensearch.extensions.rest.RestSendToExtensionActionTests; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.client.NoOpNodeClient; import org.opensearch.test.transport.MockTransportService; diff --git a/server/src/test/java/org/opensearch/extensions/rest/RestInitializeExtensionActionTests.java b/server/src/test/java/org/opensearch/extensions/rest/RestInitializeExtensionActionTests.java index 030df865c4e4a..1faa3bd9aec55 100644 --- a/server/src/test/java/org/opensearch/extensions/rest/RestInitializeExtensionActionTests.java +++ b/server/src/test/java/org/opensearch/extensions/rest/RestInitializeExtensionActionTests.java @@ -34,7 +34,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.extensions.ExtensionsManager; import org.opensearch.extensions.ExtensionsSettings; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.rest.RestRequest; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; diff --git a/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java b/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java index 8695e409de0bc..b59513fc0045d 100644 --- a/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java +++ b/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java @@ -46,7 +46,7 @@ import org.opensearch.extensions.action.ExtensionAction; import org.opensearch.extensions.action.ExtensionTransportAction; import org.opensearch.identity.IdentityService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.rest.NamedRoute; import org.opensearch.rest.RestHandler.Route; import org.opensearch.rest.RestRequest.Method; diff --git a/server/src/test/java/org/opensearch/extensions/settings/RegisterCustomSettingsTests.java b/server/src/test/java/org/opensearch/extensions/settings/RegisterCustomSettingsTests.java index bf9234a7f129d..22d07d7f18920 100644 --- a/server/src/test/java/org/opensearch/extensions/settings/RegisterCustomSettingsTests.java +++ b/server/src/test/java/org/opensearch/extensions/settings/RegisterCustomSettingsTests.java @@ -16,8 
+16,8 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java b/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java index 830a8a9ad8ab7..714a2389e64e1 100644 --- a/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java +++ b/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java @@ -58,7 +58,7 @@ import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; import org.opensearch.env.TestEnvironment; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.node.Node; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; diff --git a/server/src/test/java/org/opensearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/opensearch/gateway/PersistedClusterStateServiceTests.java index 211c13e00e6db..14ed610942e15 100644 --- a/server/src/test/java/org/opensearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/opensearch/gateway/PersistedClusterStateServiceTests.java @@ -62,7 +62,7 @@ import org.opensearch.env.NodeMetadata; import org.opensearch.gateway.PersistedClusterStateService.Writer; import org.opensearch.core.index.Index; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.MockLogAppender; import org.opensearch.test.junit.annotations.TestLogging; diff --git a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java index b36b3110b2123..fbf0e8cd42c72 100644 --- a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java @@ -46,7 +46,7 @@ import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestResponse; diff --git a/server/src/test/java/org/opensearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/opensearch/http/DefaultRestChannelTests.java index 1ffe0538edaad..fb545e8db582e 100644 --- a/server/src/test/java/org/opensearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/opensearch/http/DefaultRestChannelTests.java @@ -47,7 +47,7 @@ import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.lease.Releasable; import org.opensearch.core.xcontent.XContentBuilder; -import 
org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; diff --git a/server/src/test/java/org/opensearch/index/IndexModuleTests.java b/server/src/test/java/org/opensearch/index/IndexModuleTests.java index 95d0ae706b286..2b89558e394fc 100644 --- a/server/src/test/java/org/opensearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/opensearch/index/IndexModuleTests.java @@ -56,7 +56,7 @@ import org.opensearch.common.SetOnce; import org.opensearch.common.SetOnce.AlreadySetException; import org.opensearch.common.UUIDs; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; @@ -99,8 +99,8 @@ import org.opensearch.indices.IndicesModule; import org.opensearch.indices.IndicesQueryCache; import org.opensearch.indices.analysis.AnalysisModule; -import org.opensearch.indices.breaker.CircuitBreakerService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.mapper.MapperRegistry; diff --git a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java index e42e9b4970081..f91905fea9561 100644 --- a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java +++ b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java @@ -41,7 +41,7 @@ import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsException; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.translog.Translog; diff --git a/server/src/test/java/org/opensearch/index/MergePolicySettingsTests.java b/server/src/test/java/org/opensearch/index/MergePolicySettingsTests.java index b1dba0c22c075..387997892ee30 100644 --- a/server/src/test/java/org/opensearch/index/MergePolicySettingsTests.java +++ b/server/src/test/java/org/opensearch/index/MergePolicySettingsTests.java @@ -33,8 +33,8 @@ import org.apache.lucene.index.NoMergePolicy; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index c2ac8b0e1d3b3..d58d166415446 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -111,7 
+111,7 @@ import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver; import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.concurrent.AbstractRunnable; @@ -151,7 +151,7 @@ import org.opensearch.index.translog.TranslogDeletionPolicyFactory; import org.opensearch.index.translog.TranslogException; import org.opensearch.index.translog.listener.TranslogEventListener; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.ThreadPool; diff --git a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java index b9fe69c282471..5fe6c2d08cdc7 100644 --- a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java @@ -44,7 +44,7 @@ import org.opensearch.cluster.routing.TestShardRouting; import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.io.IOUtils; import org.opensearch.index.IndexSettings; diff --git a/server/src/test/java/org/opensearch/index/fielddata/FieldDataCacheTests.java b/server/src/test/java/org/opensearch/index/fielddata/FieldDataCacheTests.java index 48ab9d60727b7..babf0782cdec0 100644 --- a/server/src/test/java/org/opensearch/index/fielddata/FieldDataCacheTests.java +++ b/server/src/test/java/org/opensearch/index/fielddata/FieldDataCacheTests.java @@ -49,7 +49,7 @@ import org.opensearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; import org.opensearch.index.mapper.TextFieldMapper; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.aggregations.support.CoreValuesSourceType; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.FieldMaskingReader; diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index 915a15da6cb1d..1340ff1868a11 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -141,7 +141,7 @@ import org.opensearch.index.translog.TranslogStats; import org.opensearch.index.translog.listener.TranslogEventListener; import org.opensearch.indices.IndicesQueryCache; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.recovery.RecoveryState; import org.opensearch.indices.recovery.RecoveryTarget; diff --git a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java 
b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java index 282f4c09b982d..b51ceb2250e8f 100644 --- a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java @@ -47,7 +47,7 @@ import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java index 1be6c07539cb7..f3cffa4721ee5 100644 --- a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java @@ -71,7 +71,7 @@ import org.opensearch.index.store.Store; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.DummyShardLock; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.IndexSettingsModule; diff --git a/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java b/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java index 200688f35352c..566a53c2508c8 100644 --- a/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java +++ b/server/src/test/java/org/opensearch/index/snapshots/blobstore/FileInfoTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.util.Version; import org.opensearch.OpenSearchParseException; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/index/store/remote/file/OnDemandBlockSnapshotIndexInputTests.java b/server/src/test/java/org/opensearch/index/store/remote/file/OnDemandBlockSnapshotIndexInputTests.java index ab0046c9554f5..350fc384458ab 100644 --- a/server/src/test/java/org/opensearch/index/store/remote/file/OnDemandBlockSnapshotIndexInputTests.java +++ b/server/src/test/java/org/opensearch/index/store/remote/file/OnDemandBlockSnapshotIndexInputTests.java @@ -18,7 +18,7 @@ import org.apache.lucene.util.Constants; import org.apache.lucene.util.Version; import org.junit.Before; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; import org.opensearch.index.store.StoreFileMetadata; import org.opensearch.index.store.remote.utils.BlobFetchRequest; diff --git a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java index eec9666cc06ba..7a912ecf98292 100644 --- 
a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java +++ b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java @@ -20,8 +20,8 @@ import org.junit.Before; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexSettings; diff --git a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheTests.java b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheTests.java index 02b6a48b6f48e..19bfc38056af1 100644 --- a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheTests.java +++ b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheTests.java @@ -14,9 +14,9 @@ import org.opensearch.common.breaker.TestCircuitBreaker; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.store.remote.directory.RemoteSnapshotDirectoryFactory; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.index.store.remote.utils.cache.CacheUsage; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/index/store/remote/utils/TransferManagerTests.java b/server/src/test/java/org/opensearch/index/store/remote/utils/TransferManagerTests.java index 962b3a58c0658..ddf61de292eed 100644 --- a/server/src/test/java/org/opensearch/index/store/remote/utils/TransferManagerTests.java +++ b/server/src/test/java/org/opensearch/index/store/remote/utils/TransferManagerTests.java @@ -25,8 +25,8 @@ import org.junit.After; import org.junit.Before; import org.opensearch.common.blobstore.BlobContainer; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.index.store.remote.file.CleanerDaemonThreadLeakFilter; import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.index.store.remote.filecache.FileCacheFactory; diff --git a/server/src/test/java/org/opensearch/index/translog/LocalTranslogTests.java b/server/src/test/java/org/opensearch/index/translog/LocalTranslogTests.java index 788391f0e42c0..cd10b7bde3a73 100644 --- a/server/src/test/java/org/opensearch/index/translog/LocalTranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/LocalTranslogTests.java @@ -63,8 +63,8 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import 
org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.concurrent.ReleasableLock; diff --git a/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java b/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java index d26379eaefa5c..349f78c3c2d7b 100644 --- a/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java @@ -30,8 +30,8 @@ import org.opensearch.common.bytes.ReleasableBytesReference; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.io.IOUtils; diff --git a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java index 62984f43744c8..19ed7122741aa 100644 --- a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java +++ b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java @@ -36,8 +36,8 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.SetOnce; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.EngineConfig; diff --git a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java index a06688150a38a..2a03ddc6f46e4 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java @@ -51,7 +51,7 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.util.io.IOUtils; diff --git a/server/src/test/java/org/opensearch/indices/breaker/BreakerSettingsTests.java b/server/src/test/java/org/opensearch/indices/breaker/BreakerSettingsTests.java index 93c3b0f48315d..7b1aee370ddb7 100644 --- a/server/src/test/java/org/opensearch/indices/breaker/BreakerSettingsTests.java +++ b/server/src/test/java/org/opensearch/indices/breaker/BreakerSettingsTests.java @@ -32,7 +32,7 @@ package org.opensearch.indices.breaker; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchTestCase; diff --git 
a/server/src/test/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java index 7bdff59e7c334..7ed072306e2cb 100644 --- a/server/src/test/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -33,13 +33,14 @@ package org.opensearch.indices.breaker; import org.opensearch.common.breaker.ChildMemoryCircuitBreaker; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.monitor.jvm.JvmInfo; import org.opensearch.search.aggregations.MultiBucketConsumerService; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/indices/memory/breaker/CircuitBreakerUnitTests.java b/server/src/test/java/org/opensearch/indices/memory/breaker/CircuitBreakerUnitTests.java index a2705a0730aaf..fad619f40222b 100644 --- a/server/src/test/java/org/opensearch/indices/memory/breaker/CircuitBreakerUnitTests.java +++ b/server/src/test/java/org/opensearch/indices/memory/breaker/CircuitBreakerUnitTests.java @@ -32,7 +32,7 @@ package org.opensearch.indices.memory.breaker; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.indices.breaker.BreakerSettings; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; diff --git a/server/src/test/java/org/opensearch/monitor/fs/FsProbeTests.java b/server/src/test/java/org/opensearch/monitor/fs/FsProbeTests.java index 216594f24e2ea..06a8e27b079ee 100644 --- a/server/src/test/java/org/opensearch/monitor/fs/FsProbeTests.java +++ b/server/src/test/java/org/opensearch/monitor/fs/FsProbeTests.java @@ -33,12 +33,12 @@ package org.opensearch.monitor.fs; import org.apache.lucene.util.Constants; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.env.NodeEnvironment; import org.opensearch.env.NodeEnvironment.NodePath; import org.opensearch.index.store.remote.filecache.FileCache; diff --git a/server/src/test/java/org/opensearch/monitor/jvm/JvmGcMonitorServiceTests.java b/server/src/test/java/org/opensearch/monitor/jvm/JvmGcMonitorServiceTests.java index 3ea53bd0ea811..6c0bdfc6cede1 100644 --- 
a/server/src/test/java/org/opensearch/monitor/jvm/JvmGcMonitorServiceTests.java +++ b/server/src/test/java/org/opensearch/monitor/jvm/JvmGcMonitorServiceTests.java @@ -33,7 +33,7 @@ package org.opensearch.monitor.jvm; import org.apache.logging.log4j.Logger; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/monitor/jvm/JvmStatsTests.java b/server/src/test/java/org/opensearch/monitor/jvm/JvmStatsTests.java index 6a7d3afdc5dad..e6f45ca47a2fe 100644 --- a/server/src/test/java/org/opensearch/monitor/jvm/JvmStatsTests.java +++ b/server/src/test/java/org/opensearch/monitor/jvm/JvmStatsTests.java @@ -32,7 +32,7 @@ package org.opensearch.monitor.jvm; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.common.Strings; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/node/NodeTests.java b/server/src/test/java/org/opensearch/node/NodeTests.java index ae8028c143498..6dfecd8a692f1 100644 --- a/server/src/test/java/org/opensearch/node/NodeTests.java +++ b/server/src/test/java/org/opensearch/node/NodeTests.java @@ -37,13 +37,13 @@ import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.node.DiscoveryNodeRole; import org.opensearch.common.SetOnce; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsException; import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexService; @@ -51,7 +51,7 @@ import org.opensearch.index.shard.IndexShard; import org.opensearch.indices.IndicesService; import org.opensearch.indices.breaker.BreakerSettings; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.monitor.fs.FsInfo; import org.opensearch.monitor.fs.FsProbe; import org.opensearch.plugins.CircuitBreakerPlugin; diff --git a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java index 7b17d4d3b0471..347fef773fc8a 100644 --- a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java @@ -42,7 +42,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.http.HttpInfo; diff --git a/server/src/test/java/org/opensearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/opensearch/persistent/TestPersistentTasksPlugin.java index 
3991f6721a350..63f753f698597 100644 --- a/server/src/test/java/org/opensearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/opensearch/persistent/TestPersistentTasksPlugin.java @@ -53,7 +53,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Strings; -import org.opensearch.common.component.Lifecycle; +import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.common.inject.Inject; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.StreamInput; diff --git a/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java index 6cbbb4ffcb7c7..085a64b439bbe 100644 --- a/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java @@ -51,8 +51,8 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.blobstore.BlobStore; -import org.opensearch.common.component.Lifecycle; -import org.opensearch.common.component.LifecycleListener; +import org.opensearch.common.lifecycle.Lifecycle; +import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.index.mapper.MapperService; diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java index 9d711d464754c..893631ce4b564 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -45,7 +45,7 @@ import org.opensearch.common.blobstore.BlobContainer; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.xcontent.NamedXContentRegistry; diff --git a/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java index 3049aa161f10b..752bd5f5ede82 100644 --- a/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java @@ -59,7 +59,7 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; diff --git a/server/src/test/java/org/opensearch/rest/RestControllerTests.java b/server/src/test/java/org/opensearch/rest/RestControllerTests.java index f8ea59040d22b..ea0ce54913a8d 100644 --- a/server/src/test/java/org/opensearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/opensearch/rest/RestControllerTests.java @@ -33,15 +33,15 @@ package 
org.opensearch.rest; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; diff --git a/server/src/test/java/org/opensearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/opensearch/rest/RestHttpResponseHeadersTests.java index b181d571c14fb..b1ddea9329885 100644 --- a/server/src/test/java/org/opensearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/opensearch/rest/RestHttpResponseHeadersTests.java @@ -39,7 +39,7 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.rest.RestStatus; import org.opensearch.identity.IdentityService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.rest.FakeRestChannel; diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java index 094b4d8b8c12d..2f15090e49f75 100644 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestValidateQueryActionTests.java @@ -45,7 +45,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; import org.opensearch.identity.IdentityService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.rest.RestController; import org.opensearch.rest.RestRequest; import org.opensearch.search.AbstractSearchTestCase; diff --git a/server/src/test/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationActionTests.java b/server/src/test/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationActionTests.java index 44d8f84eef524..339e6870efb92 100644 --- a/server/src/test/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/cat/RestCatSegmentReplicationActionTests.java @@ -15,7 +15,7 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.Randomness; import org.opensearch.common.Table; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.index.Index; import org.opensearch.index.SegmentReplicationPerGroupStats; diff --git 
a/server/src/test/java/org/opensearch/script/ScriptCacheTests.java b/server/src/test/java/org/opensearch/script/ScriptCacheTests.java index 6395b8b315196..a7d573de5f5e3 100644 --- a/server/src/test/java/org/opensearch/script/ScriptCacheTests.java +++ b/server/src/test/java/org/opensearch/script/ScriptCacheTests.java @@ -31,7 +31,7 @@ package org.opensearch.script; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.rest.RestStatus; diff --git a/server/src/test/java/org/opensearch/script/ScriptServiceTests.java b/server/src/test/java/org/opensearch/script/ScriptServiceTests.java index 489b267f586e4..a4288eeab4524 100644 --- a/server/src/test/java/org/opensearch/script/ScriptServiceTests.java +++ b/server/src/test/java/org/opensearch/script/ScriptServiceTests.java @@ -36,7 +36,7 @@ import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.Metadata; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.settings.ClusterSettings; diff --git a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java index 8d8f2856d7703..958548ed49d67 100644 --- a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java @@ -65,7 +65,7 @@ import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.shard.IndexShard; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.internal.AliasFilter; import org.opensearch.search.internal.LegacyReaderContext; import org.opensearch.search.internal.PitReaderContext; diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregatorBaseTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregatorBaseTests.java index 312ebfcfae5e8..6ae719cf00e7a 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/AggregatorBaseTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/AggregatorBaseTests.java @@ -38,7 +38,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.BigArrays; import org.opensearch.index.IndexService; @@ -47,7 +47,7 @@ import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.NumberFieldMapper; import org.opensearch.index.query.QueryShardContext; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.search.aggregations.support.ValuesSourceConfig; import org.opensearch.search.internal.SearchContext; import org.opensearch.test.OpenSearchSingleNodeTestCase; diff --git 
a/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java index 55f8d11e2d934..5f33777562721 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -40,9 +40,9 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.index.mapper.NumberFieldMapper; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.aggregations.AggregatorFactories; import org.opensearch.search.aggregations.AggregatorTestCase; import org.opensearch.search.aggregations.InternalAggregation; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java index 72b11c1657fb5..ea0e45687c583 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java @@ -33,11 +33,11 @@ package org.opensearch.search.aggregations.bucket.histogram; import org.apache.lucene.tests.util.TestUtil; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.script.ScriptService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.InternalAggregation; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java index 427d0b89aa688..a7a65419b73b1 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java @@ -48,7 +48,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.aggregations.AggregatorTestCase; import org.opensearch.search.aggregations.BucketCollector; import org.opensearch.search.aggregations.LeafBucketCollector; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java index f73146038a5f1..c593ed4147e7e 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BytesKeyedBucketOrdsTests.java @@ -36,7 +36,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.aggregations.CardinalityUpperBound; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java index d0a6500ecf9e7..bdecae8f7dfa3 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/LongKeyedBucketOrdsTests.java @@ -35,7 +35,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.aggregations.CardinalityUpperBound; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java index a2792114e9529..6c27335502e34 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java @@ -41,7 +41,7 @@ import org.opensearch.index.mapper.NumberFieldMapper; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.shard.IndexShard; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.script.MockScriptEngine; import org.opensearch.script.Script; import org.opensearch.script.ScriptEngine; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 4ce66e0495f83..6a407469e5273 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -52,7 +52,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.network.InetAddresses; import org.opensearch.common.settings.Settings; @@ -74,7 +74,7 @@ import org.opensearch.index.mapper.Uid; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.QueryBuilders; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import 
org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.script.MockScriptEngine; import org.opensearch.script.Script; import org.opensearch.script.ScriptEngine; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java index 5968605fc0c34..cca51a8fc3837 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusSparseTests.java @@ -32,13 +32,13 @@ package org.opensearch.search.aggregations.metrics; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.BitMixer; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java index 6f5f7494331a5..52fe314eaf079 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java @@ -32,12 +32,12 @@ package org.opensearch.search.aggregations.metrics; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.BitMixer; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.util.HashSet; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalCardinalityTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalCardinalityTests.java index a95533bb3c33c..e5abe380091dc 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalCardinalityTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalCardinalityTests.java @@ -37,7 +37,7 @@ import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.lease.Releasables; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.aggregations.ParsedAggregation; import org.opensearch.test.InternalAggregationTestCase; import org.junit.After; 
diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java index f0273b46ef97e..9deff7839ffda 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java @@ -69,7 +69,7 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.NumberFieldMapper; import org.opensearch.index.query.QueryShardContext; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.script.MockScriptEngine; import org.opensearch.script.Script; import org.opensearch.script.ScriptEngine; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java index 6a4ecf01577d8..c0b5743bbfb9a 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java @@ -44,16 +44,16 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.opensearch.common.CheckedConsumer; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.BigArrays; import org.opensearch.index.IndexSettings; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryShardContext; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.script.MockScriptEngine; import org.opensearch.script.Script; import org.opensearch.script.ScriptEngine; diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java index d0f4a974fc305..9fc0be38d66a5 100644 --- a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java +++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java @@ -37,8 +37,8 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.metrics.OperationStats; import org.opensearch.common.settings.Settings; diff --git a/server/src/test/java/org/opensearch/search/sort/BucketedSortTestCase.java b/server/src/test/java/org/opensearch/search/sort/BucketedSortTestCase.java index 98c04eaf9415a..78cfda8e70660 
100644 --- a/server/src/test/java/org/opensearch/search/sort/BucketedSortTestCase.java +++ b/server/src/test/java/org/opensearch/search/sort/BucketedSortTestCase.java @@ -40,7 +40,7 @@ import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.lease.Releasable; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index e4dec5163c400..32e0edf7c8f11 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -187,7 +187,7 @@ import org.opensearch.indices.ShardLimitValidator; import org.opensearch.indices.SystemIndices; import org.opensearch.indices.analysis.AnalysisModule; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.indices.mapper.MapperRegistry; import org.opensearch.indices.recovery.PeerRecoverySourceService; diff --git a/server/src/test/java/org/opensearch/test/NoopDiscovery.java b/server/src/test/java/org/opensearch/test/NoopDiscovery.java index c1127aed95c1c..f7365d7d71693 100644 --- a/server/src/test/java/org/opensearch/test/NoopDiscovery.java +++ b/server/src/test/java/org/opensearch/test/NoopDiscovery.java @@ -33,8 +33,8 @@ import org.opensearch.action.ActionListener; import org.opensearch.cluster.ClusterChangedEvent; -import org.opensearch.common.component.Lifecycle; -import org.opensearch.common.component.LifecycleListener; +import org.opensearch.common.lifecycle.Lifecycle; +import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.discovery.Discovery; import org.opensearch.discovery.DiscoveryStats; diff --git a/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java b/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java index dfb14be54a5b9..fd99942d27255 100644 --- a/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java +++ b/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java @@ -33,7 +33,7 @@ package org.opensearch.transport; import org.opensearch.Version; -import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.breaker.TestCircuitBreaker; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.bytes.ReleasableBytesReference; diff --git a/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java b/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java index 33fe928d5c09d..c6b16d70fc7e0 100644 --- a/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java +++ b/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java @@ -33,9 +33,9 @@ package org.opensearch.transport; import org.opensearch.Version; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.CircuitBreakingException; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import 
org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.common.breaker.TestCircuitBreaker; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; diff --git a/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java b/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java index c61ac51ba57d4..fe8be400b03c9 100644 --- a/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java +++ b/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java @@ -36,8 +36,8 @@ import org.opensearch.Version; import org.opensearch.action.ActionListener; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.breaker.NoopCircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.bytes.ReleasableBytesReference; diff --git a/server/src/test/java/org/opensearch/transport/TcpTransportTests.java b/server/src/test/java/org/opensearch/transport/TcpTransportTests.java index 05d375579f3a5..39bc2d9bd2d48 100644 --- a/server/src/test/java/org/opensearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/opensearch/transport/TcpTransportTests.java @@ -38,7 +38,7 @@ import org.opensearch.Version; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.component.Lifecycle; +import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.network.NetworkService; import org.opensearch.common.network.NetworkUtils; @@ -46,7 +46,7 @@ import org.opensearch.common.transport.TransportAddress; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.MockLogAppender; import org.opensearch.test.junit.annotations.TestLogging; diff --git a/server/src/test/java/org/opensearch/transport/TransportServiceHandshakeTests.java b/server/src/test/java/org/opensearch/transport/TransportServiceHandshakeTests.java index 71c69acb75896..b2bdf8e3cb4d9 100644 --- a/server/src/test/java/org/opensearch/transport/TransportServiceHandshakeTests.java +++ b/server/src/test/java/org/opensearch/transport/TransportServiceHandshakeTests.java @@ -40,7 +40,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.MockLogAppender; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; diff --git a/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java index 
98b4774ce1836..72fabc001760f 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -80,7 +80,7 @@ import org.opensearch.gateway.GatewayService; import org.opensearch.gateway.MockGatewayMetaState; import org.opensearch.gateway.PersistedClusterStateService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.monitor.NodeHealthService; import org.opensearch.monitor.StatusInfo; import org.opensearch.test.OpenSearchTestCase; diff --git a/test/framework/src/main/java/org/opensearch/common/breaker/TestCircuitBreaker.java b/test/framework/src/main/java/org/opensearch/common/breaker/TestCircuitBreaker.java index bbacdd2ac4eec..9705a56700f72 100644 --- a/test/framework/src/main/java/org/opensearch/common/breaker/TestCircuitBreaker.java +++ b/test/framework/src/main/java/org/opensearch/common/breaker/TestCircuitBreaker.java @@ -32,6 +32,9 @@ package org.opensearch.common.breaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.core.common.breaker.NoopCircuitBreaker; + import java.util.concurrent.atomic.AtomicBoolean; public class TestCircuitBreaker extends NoopCircuitBreaker { diff --git a/test/framework/src/main/java/org/opensearch/common/bytes/AbstractBytesReferenceTestCase.java b/test/framework/src/main/java/org/opensearch/common/bytes/AbstractBytesReferenceTestCase.java index 07fd383dc230a..7b12591365cf4 100644 --- a/test/framework/src/main/java/org/opensearch/common/bytes/AbstractBytesReferenceTestCase.java +++ b/test/framework/src/main/java/org/opensearch/common/bytes/AbstractBytesReferenceTestCase.java @@ -35,7 +35,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefIterator; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.ReleasableBytesStreamOutput; import org.opensearch.core.common.bytes.AbstractBytesReference; @@ -45,7 +45,7 @@ import org.opensearch.common.util.BigArrays; import org.opensearch.core.common.util.ByteArray; import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; import java.io.EOFException; diff --git a/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java index 5ef68f0eab757..54d53b931f171 100644 --- a/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java @@ -38,11 +38,11 @@ import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; import org.apache.lucene.tests.util.LuceneTestCase; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.util.set.Sets; import org.opensearch.core.common.util.BigArray; import org.opensearch.core.common.util.ByteArray; -import org.opensearch.indices.breaker.CircuitBreakerService; +import 
org.opensearch.core.indices.breaker.CircuitBreakerService; import java.util.Collection; import java.util.Collections; diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index 1ac92bbb479c3..df2e6da972154 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -119,8 +119,8 @@ import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogManager; import org.opensearch.index.translog.listener.TranslogEventListener; -import org.opensearch.indices.breaker.CircuitBreakerService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.DummyShardLock; import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.OpenSearchTestCase; diff --git a/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java index 57c09db577247..09e6cb07998c2 100644 --- a/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/mapper/MapperTestCase.java @@ -53,7 +53,7 @@ import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.IndexFieldDataCache; import org.opensearch.index.query.QueryShardContext; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.search.DocValueFormat; import org.opensearch.search.lookup.SearchLookup; diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 93b9742ada0da..7a492dbebd836 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -73,7 +73,7 @@ import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.xcontent.XContentType; @@ -112,7 +112,7 @@ import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogFactory; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.indices.recovery.AsyncRecoveryTarget; import org.opensearch.indices.recovery.PeerRecoveryTargetService; diff --git a/test/framework/src/main/java/org/opensearch/index/shard/RestoreOnlyRepository.java b/test/framework/src/main/java/org/opensearch/index/shard/RestoreOnlyRepository.java index a9e1a526b1786..3744b06c35a72 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/RestoreOnlyRepository.java +++ 
b/test/framework/src/main/java/org/opensearch/index/shard/RestoreOnlyRepository.java @@ -40,7 +40,7 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.snapshots.IndexShardSnapshotStatus; diff --git a/test/framework/src/main/java/org/opensearch/node/MockNode.java b/test/framework/src/main/java/org/opensearch/node/MockNode.java index 0bd6e1872a101..7d3a88fb1aba2 100644 --- a/test/framework/src/main/java/org/opensearch/node/MockNode.java +++ b/test/framework/src/main/java/org/opensearch/node/MockNode.java @@ -49,7 +49,7 @@ import org.opensearch.env.Environment; import org.opensearch.http.HttpServerTransport; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptService; diff --git a/test/framework/src/main/java/org/opensearch/node/RecoverySettingsChunkSizePlugin.java b/test/framework/src/main/java/org/opensearch/node/RecoverySettingsChunkSizePlugin.java index 062675737f277..dabf23ce08263 100644 --- a/test/framework/src/main/java/org/opensearch/node/RecoverySettingsChunkSizePlugin.java +++ b/test/framework/src/main/java/org/opensearch/node/RecoverySettingsChunkSizePlugin.java @@ -34,7 +34,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.plugins.Plugin; diff --git a/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index bcbf3b5b9a610..387a15899ce6f 100644 --- a/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ b/test/framework/src/main/java/org/opensearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -40,7 +40,7 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.Streams; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.CountDown; import org.opensearch.test.OpenSearchTestCase; diff --git a/test/framework/src/main/java/org/opensearch/search/MockSearchService.java b/test/framework/src/main/java/org/opensearch/search/MockSearchService.java index 808dc50512c58..bad8b8cd87a9a 100644 --- a/test/framework/src/main/java/org/opensearch/search/MockSearchService.java +++ b/test/framework/src/main/java/org/opensearch/search/MockSearchService.java @@ -35,7 +35,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.util.BigArrays; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import 
org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.node.MockNode; import org.opensearch.plugins.Plugin; import org.opensearch.script.ScriptService; diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index 60d337599771c..874d60a4097f2 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -63,7 +63,7 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.CheckedConsumer; import org.opensearch.common.TriFunction; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.network.NetworkAddress; @@ -110,8 +110,8 @@ import org.opensearch.index.shard.IndexShard; import org.opensearch.core.index.shard.ShardId; import org.opensearch.indices.IndicesModule; -import org.opensearch.indices.breaker.CircuitBreakerService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.mapper.MapperRegistry; import org.opensearch.plugins.SearchPlugin; diff --git a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java index e3b1c7af6891c..a169fb871ca53 100644 --- a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -55,7 +55,7 @@ import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.compress.CompressorType; import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.DeprecationHandler; diff --git a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java index 527e04b6a4eb5..e256e1b752a53 100644 --- a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java @@ -73,7 +73,7 @@ import org.opensearch.index.similarity.SimilarityService; import org.opensearch.indices.IndicesModule; import org.opensearch.indices.analysis.AnalysisModule; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.mapper.MapperRegistry; import org.opensearch.node.InternalSettingsPreparer; diff --git a/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java index 
4eb73487ac886..1869fc8c9b447 100644 --- a/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java @@ -41,7 +41,7 @@ import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.opensearch.client.Client; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.Settings; diff --git a/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java index 95246e06d028c..a9e7b7d500cf1 100644 --- a/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java @@ -33,7 +33,7 @@ package org.opensearch.test; import org.opensearch.common.SetOnce; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; @@ -47,7 +47,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.core.xcontent.XContentParserUtils; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.plugins.Plugin; import org.opensearch.plugins.SearchPlugin; import org.opensearch.rest.action.search.RestSearchAction; diff --git a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java index e062c5d166f12..6d9767843400b 100644 --- a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java @@ -65,15 +65,15 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; import org.opensearch.common.Randomness; -import org.opensearch.common.breaker.CircuitBreaker; -import org.opensearch.common.component.LifecycleListener; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.SecureSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.Settings.Builder; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.OpenSearchExecutors; @@ -101,7 +101,7 @@ import org.opensearch.index.shard.IndexShardTestCase; import org.opensearch.core.index.shard.ShardId; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.breaker.CircuitBreakerService; +import 
org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.indices.fielddata.cache.IndicesFieldDataCache; import org.opensearch.indices.recovery.RecoverySettings; diff --git a/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java b/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java index 7661b21c7cc0e..e156449adc184 100644 --- a/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java @@ -32,7 +32,7 @@ package org.opensearch.test; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; import org.opensearch.http.HttpInfo; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 3564bd667ee2b..5bba700f53dc4 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -106,8 +106,8 @@ import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; -import org.opensearch.common.unit.ByteSizeUnit; -import org.opensearch.common.unit.ByteSizeValue; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.ThreadContext; diff --git a/test/framework/src/main/java/org/opensearch/test/rest/RestActionTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/RestActionTestCase.java index df2d3790d42bb..31f94039ae28f 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/RestActionTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/RestActionTestCase.java @@ -39,7 +39,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.identity.IdentityService; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.client.node.NodeClient; import org.opensearch.rest.RestController; import org.opensearch.rest.RestRequest; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java b/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java index b0aea3d2e3841..4d59afd5f99ed 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java @@ -34,7 +34,7 @@ import org.opensearch.action.ActionListener; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; import org.opensearch.transport.CloseableConnection; diff --git 
a/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java index 87f198fb40ee5..7a1d730ac1b27 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java @@ -53,7 +53,7 @@ import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.RunOnce; import org.opensearch.common.util.io.IOUtils; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.node.Node; import org.opensearch.plugins.Plugin; import org.opensearch.tasks.TaskManager; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java b/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java index 8c3b32dbb4ca4..8d66d481dc4aa 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java @@ -35,8 +35,8 @@ import org.opensearch.Version; import org.opensearch.action.ActionListener; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.component.Lifecycle; -import org.opensearch.common.component.LifecycleListener; +import org.opensearch.common.lifecycle.Lifecycle; +import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; import org.opensearch.tasks.Task; diff --git a/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransport.java b/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransport.java index bfacf9d3f2080..73b2e5dd82f62 100644 --- a/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransport.java +++ b/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransport.java @@ -40,7 +40,7 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.bytes.CompositeBytesReference; import org.opensearch.common.bytes.ReleasableBytesReference; @@ -53,7 +53,7 @@ import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.nio.BytesChannelContext; import org.opensearch.nio.BytesWriteHandler; import org.opensearch.nio.ChannelFactory; diff --git a/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransportPlugin.java b/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransportPlugin.java index e51f96a86bebf..6334788edbbf2 100644 --- a/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransportPlugin.java +++ b/test/framework/src/main/java/org/opensearch/transport/nio/MockNioTransportPlugin.java @@ -36,7 +36,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; 
import org.opensearch.common.util.PageCacheRecycler; -import org.opensearch.indices.breaker.CircuitBreakerService; +import org.opensearch.core.indices.breaker.CircuitBreakerService; import org.opensearch.plugins.NetworkPlugin; import org.opensearch.plugins.Plugin; import org.opensearch.threadpool.ThreadPool; diff --git a/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java index 8b0ffb2d0652d..868affc81be37 100644 --- a/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java @@ -41,7 +41,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; import org.opensearch.common.util.MockPageCacheRecycler; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; +import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.transport.AbstractSimpleTransportTestCase; import org.opensearch.transport.ConnectTransportException; import org.opensearch.transport.ConnectionProfile; From b2a734872458468fad51df46eab52d2449547d56 Mon Sep 17 00:00:00 2001 From: Ketan Verma <9292653+ketanv3@users.noreply.github.com> Date: Tue, 1 Aug 2023 09:14:36 +0530 Subject: [PATCH 33/75] Fix flaky ResourceAwareTasksTests.testBasicTaskResourceTracking test (#8993) Signed-off-by: Ketan Verma --- CHANGELOG.md | 1 + .../admin/cluster/node/tasks/ResourceAwareTasksTests.java | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b656c3b8ebb8e..9e4a26caa2b10 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -111,6 +111,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Removed ### Fixed +- Fix flaky ResourceAwareTasksTests.testBasicTaskResourceTracking test ([#8993](https://github.com/opensearch-project/OpenSearch/pull/8993)) ### Security diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java index 96f2365412e7b..485aae433aa16 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java @@ -672,8 +672,8 @@ private void assertTasksRequestFinishedSuccessfully(NodesResponse nodesResponse, } private void assertMemoryUsageWithinLimits(long actual, long expected) { - // 5% buffer up to 200 KB to account for classloading overhead. - long maxOverhead = Math.min(200000, expected * 5 / 100); + // 5% buffer up to 500 KB to account for classloading overhead. 
+ long maxOverhead = Math.min(500000, expected * 5 / 100); assertThat(actual, lessThanOrEqualTo(expected + maxOverhead)); } From 77074b4a81f88bab44e9e39e0b6eb9323880e740 Mon Sep 17 00:00:00 2001 From: Shourya <114977491+shourya035@users.noreply.github.com> Date: Tue, 1 Aug 2023 17:45:40 +0530 Subject: [PATCH 34/75] [Remote Store] Add Segment download stats to remotestore stats API (#8718) --------- Signed-off-by: Shourya Dutta Biswas <114977491+shourya035@users.noreply.github.com> Signed-off-by: Shourya <114977491+shourya035@users.noreply.github.com> Signed-off-by: Ashish Singh Co-authored-by: Ashish Singh --- .../RemoteStoreBackpressureIT.java | 10 +- .../remotestore/RemoteStoreStatsIT.java | 491 +++++++++++++++++- .../remotestore/stats/RemoteStoreStats.java | 172 ++++-- .../stats/RemoteStoreStatsResponse.java | 51 +- .../TransportRemoteStoreStatsAction.java | 11 +- .../RemoteRefreshSegmentPressureService.java | 49 +- ...java => RemoteSegmentTransferTracker.java} | 31 +- .../shard/RemoteStoreRefreshListener.java | 6 +- .../store/DirectoryFileTransferTracker.java | 195 +++++++ .../org/opensearch/index/store/Store.java | 54 +- .../stats/RemoteStoreStatsResponseTests.java | 114 +++- .../stats/RemoteStoreStatsTestHelper.java | 290 +++++++++-- .../stats/RemoteStoreStatsTests.java | 190 ++++++- .../TransportRemoteStoreStatsActionTests.java | 14 +- ...oteRefreshSegmentPressureServiceTests.java | 6 +- ...=> RemoteSegmentTransferTrackerTests.java} | 218 ++++++-- .../RemoteStoreRefreshListenerTests.java | 14 +- 17 files changed, 1656 insertions(+), 260 deletions(-) rename server/src/main/java/org/opensearch/index/remote/{RemoteRefreshSegmentTracker.java => RemoteSegmentTransferTracker.java} (94%) create mode 100644 server/src/main/java/org/opensearch/index/store/DirectoryFileTransferTracker.java rename server/src/test/java/org/opensearch/index/remote/{RemoteRefreshSegmentTrackerTests.java => RemoteSegmentTransferTrackerTests.java} (66%) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java index 608d7d9d02581..9641c013bf226 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBackpressureIT.java @@ -17,7 +17,7 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.repositories.RepositoriesService; import org.opensearch.snapshots.mockstore.MockRepository; import org.opensearch.test.OpenSearchIntegTestCase; @@ -92,7 +92,7 @@ private void validateBackpressure( assertTrue(ex.getMessage().contains("rejected execution on primary shard")); assertTrue(ex.getMessage().contains(breachMode)); - RemoteRefreshSegmentTracker.Stats stats = stats(); + RemoteSegmentTransferTracker.Stats stats = stats(); assertTrue(stats.bytesLag > 0); assertTrue(stats.refreshTimeLagMs > 0); assertTrue(stats.localRefreshNumber - stats.remoteRefreshNumber > 0); @@ -102,7 +102,7 @@ private void validateBackpressure( .setRandomControlIOExceptionRate(0d); assertBusy(() -> { - RemoteRefreshSegmentTracker.Stats finalStats = stats(); + RemoteSegmentTransferTracker.Stats finalStats 
= stats(); assertEquals(0, finalStats.bytesLag); assertEquals(0, finalStats.refreshTimeLagMs); assertEquals(0, finalStats.localRefreshNumber - finalStats.remoteRefreshNumber); @@ -115,11 +115,11 @@ private void validateBackpressure( deleteRepo(); } - private RemoteRefreshSegmentTracker.Stats stats() { + private RemoteSegmentTransferTracker.Stats stats() { String shardId = "0"; RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, shardId).get(); final String indexShardId = String.format(Locale.ROOT, "[%s][%s]", INDEX_NAME, shardId); - List matches = Arrays.stream(response.getShards()) + List matches = Arrays.stream(response.getRemoteStoreStats()) .filter(stat -> indexShardId.equals(stat.getStats().shardId.toString())) .collect(Collectors.toList()); assertEquals(1, matches.size()); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java index 76ef153fab963..840e3a07ed255 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java @@ -9,18 +9,32 @@ package org.opensearch.remotestore; import org.junit.Before; +import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest; import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStats; import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsRequestBuilder; import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsResponse; +import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.cluster.routing.ShardRoutingState; +import org.opensearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.opensearch.common.settings.Settings; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.test.OpenSearchIntegTestCase; +import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 3) public class RemoteStoreStatsIT extends RemoteStoreBaseIntegTestCase { @@ -50,14 +64,41 @@ public void testStatsResponseFromAllNodes() { for (String node : nodes) { RemoteStoreStatsResponse response = client(node).admin().cluster().prepareRemoteStoreStats(INDEX_NAME, shardId).get(); assertTrue(response.getSuccessfulShards() > 0); - assertTrue(response.getShards() != null && response.getShards().length != 0); + assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length != 0); final String indexShardId = String.format(Locale.ROOT, "[%s][%s]", INDEX_NAME, shardId); - List matches = Arrays.stream(response.getShards()) + List matches = Arrays.stream(response.getRemoteStoreStats()) .filter(stat -> 
indexShardId.equals(stat.getStats().shardId.toString())) .collect(Collectors.toList()); assertEquals(1, matches.size()); - RemoteRefreshSegmentTracker.Stats stats = matches.get(0).getStats(); - assertResponseStats(stats); + RemoteSegmentTransferTracker.Stats stats = matches.get(0).getStats(); + validateUploadStats(stats); + assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted); + } + + // Step 3 - Enable replicas on the existing indices and ensure that download + // stats are being populated as well + changeReplicaCountAndEnsureGreen(1); + for (String node : nodes) { + RemoteStoreStatsResponse response = client(node).admin().cluster().prepareRemoteStoreStats(INDEX_NAME, shardId).get(); + assertTrue(response.getSuccessfulShards() > 0); + assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length != 0); + final String indexShardId = String.format(Locale.ROOT, "[%s][%s]", INDEX_NAME, shardId); + List matches = Arrays.stream(response.getRemoteStoreStats()) + .filter(stat -> indexShardId.equals(stat.getStats().shardId.toString())) + .collect(Collectors.toList()); + assertEquals(2, matches.size()); + for (RemoteStoreStats stat : matches) { + ShardRouting routing = stat.getShardRouting(); + validateShardRouting(routing); + RemoteSegmentTransferTracker.Stats stats = stat.getStats(); + if (routing.primary()) { + validateUploadStats(stats); + assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted); + } else { + validateDownloadStats(stats); + assertEquals(0, stats.totalUploadsStarted); + } + } } } @@ -79,10 +120,31 @@ public void testStatsResponseAllShards() { .cluster() .prepareRemoteStoreStats(INDEX_NAME, null); RemoteStoreStatsResponse response = remoteStoreStatsRequestBuilder.get(); - assertTrue(response.getSuccessfulShards() == 3); - assertTrue(response.getShards() != null && response.getShards().length == 3); - RemoteRefreshSegmentTracker.Stats stats = response.getShards()[0].getStats(); - assertResponseStats(stats); + assertEquals(3, response.getSuccessfulShards()); + assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length == 3); + RemoteSegmentTransferTracker.Stats stats = response.getRemoteStoreStats()[0].getStats(); + validateUploadStats(stats); + assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted); + + // Step 3 - Enable replicas on the existing indices and ensure that download + // stats are being populated as well + changeReplicaCountAndEnsureGreen(1); + response = client(node).admin().cluster().prepareRemoteStoreStats(INDEX_NAME, null).get(); + assertEquals(6, response.getSuccessfulShards()); + assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length == 6); + for (RemoteStoreStats stat : response.getRemoteStoreStats()) { + ShardRouting routing = stat.getShardRouting(); + validateShardRouting(routing); + stats = stat.getStats(); + if (routing.primary()) { + validateUploadStats(stats); + assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted); + } else { + validateDownloadStats(stats); + assertEquals(0, stats.totalUploadsStarted); + } + } + } public void testStatsResponseFromLocalNode() { @@ -105,29 +167,398 @@ public void testStatsResponseFromLocalNode() { .prepareRemoteStoreStats(INDEX_NAME, null); remoteStoreStatsRequestBuilder.setLocal(true); RemoteStoreStatsResponse response = remoteStoreStatsRequestBuilder.get(); - assertTrue(response.getSuccessfulShards() == 1); - 
assertTrue(response.getShards() != null && response.getShards().length == 1); - RemoteRefreshSegmentTracker.Stats stats = response.getShards()[0].getStats(); - assertResponseStats(stats); + assertEquals(1, response.getSuccessfulShards()); + assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length == 1); + RemoteSegmentTransferTracker.Stats stats = response.getRemoteStoreStats()[0].getStats(); + validateUploadStats(stats); + assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted); + } + changeReplicaCountAndEnsureGreen(1); + for (String node : nodes) { + RemoteStoreStatsRequestBuilder remoteStoreStatsRequestBuilder = client(node).admin() + .cluster() + .prepareRemoteStoreStats(INDEX_NAME, null); + remoteStoreStatsRequestBuilder.setLocal(true); + RemoteStoreStatsResponse response = remoteStoreStatsRequestBuilder.get(); + assertTrue(response.getSuccessfulShards() > 0); + assertTrue(response.getRemoteStoreStats() != null && response.getRemoteStoreStats().length != 0); + for (RemoteStoreStats stat : response.getRemoteStoreStats()) { + ShardRouting routing = stat.getShardRouting(); + validateShardRouting(routing); + RemoteSegmentTransferTracker.Stats stats = stat.getStats(); + if (routing.primary()) { + validateUploadStats(stats); + assertEquals(0, stats.directoryFileTransferTrackerStats.transferredBytesStarted); + } else { + validateDownloadStats(stats); + assertEquals(0, stats.totalUploadsStarted); + } + } + } + } + + public void testDownloadStatsCorrectnessSinglePrimarySingleReplica() throws Exception { + // Scenario: + // - Create index with single primary and single replica shard + // - Disable Refresh Interval for the index + // - Index documents + // - Trigger refresh and flush + // - Assert that download stats == upload stats + // - Repeat this step for random times (between 5 and 10) + + // Create index with 1 pri and 1 replica and refresh interval disabled + createIndex( + INDEX_NAME, + Settings.builder().put(remoteStoreIndexSettings(1, 1)).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1).build() + ); + ensureGreen(INDEX_NAME); + + // Manually invoke a refresh + refresh(INDEX_NAME); + + // Get zero state values + // Extract and assert zero state primary stats + RemoteStoreStatsResponse zeroStateResponse = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get(); + RemoteSegmentTransferTracker.Stats zeroStatePrimaryStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats()) + .filter(remoteStoreStats -> remoteStoreStats.getShardRouting().primary()) + .collect(Collectors.toList()) + .get(0) + .getStats(); + assertTrue( + zeroStatePrimaryStats.totalUploadsStarted == zeroStatePrimaryStats.totalUploadsSucceeded + && zeroStatePrimaryStats.totalUploadsSucceeded == 1 + ); + assertTrue( + zeroStatePrimaryStats.uploadBytesStarted == zeroStatePrimaryStats.uploadBytesSucceeded + && zeroStatePrimaryStats.uploadBytesSucceeded > 0 + ); + assertTrue(zeroStatePrimaryStats.totalUploadsFailed == 0 && zeroStatePrimaryStats.uploadBytesFailed == 0); + + // Extract and assert zero state replica stats + RemoteSegmentTransferTracker.Stats zeroStateReplicaStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats()) + .filter(remoteStoreStats -> !remoteStoreStats.getShardRouting().primary()) + .collect(Collectors.toList()) + .get(0) + .getStats(); + assertTrue( + zeroStateReplicaStats.directoryFileTransferTrackerStats.transferredBytesStarted == 0 + && 
zeroStateReplicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded == 0 + ); + + // Index documents + for (int i = 1; i <= randomIntBetween(5, 10); i++) { + indexSingleDoc(INDEX_NAME); + // Running Flush & Refresh manually + flushAndRefresh(INDEX_NAME); + ensureGreen(INDEX_NAME); + + // Poll for RemoteStore Stats + assertBusy(() -> { + RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get(); + // Iterate through the response and extract the relevant segment upload and download stats + List primaryStatsList = Arrays.stream(response.getRemoteStoreStats()) + .filter(remoteStoreStats -> remoteStoreStats.getShardRouting().primary()) + .collect(Collectors.toList()); + assertEquals(1, primaryStatsList.size()); + List replicaStatsList = Arrays.stream(response.getRemoteStoreStats()) + .filter(remoteStoreStats -> !remoteStoreStats.getShardRouting().primary()) + .collect(Collectors.toList()); + assertEquals(1, replicaStatsList.size()); + RemoteSegmentTransferTracker.Stats primaryStats = primaryStatsList.get(0).getStats(); + RemoteSegmentTransferTracker.Stats replicaStats = replicaStatsList.get(0).getStats(); + // Assert Upload syncs - zero state uploads == download syncs + assertTrue(primaryStats.totalUploadsStarted > 0); + assertTrue(primaryStats.totalUploadsSucceeded > 0); + assertTrue( + replicaStats.directoryFileTransferTrackerStats.transferredBytesStarted > 0 + && primaryStats.uploadBytesStarted + - zeroStatePrimaryStats.uploadBytesStarted == replicaStats.directoryFileTransferTrackerStats.transferredBytesStarted + ); + assertTrue( + replicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded > 0 + && primaryStats.uploadBytesSucceeded + - zeroStatePrimaryStats.uploadBytesSucceeded == replicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded + ); + // Assert zero failures + assertEquals(0, primaryStats.uploadBytesFailed); + assertEquals(0, replicaStats.directoryFileTransferTrackerStats.transferredBytesFailed); + }, 60, TimeUnit.SECONDS); + } + } + + public void testDownloadStatsCorrectnessSinglePrimaryMultipleReplicaShards() throws Exception { + // Scenario: + // - Create index with single primary and N-1 replica shards (N = no of data nodes) + // - Disable Refresh Interval for the index + // - Index documents + // - Trigger refresh and flush + // - Assert that download stats == upload stats + // - Repeat this step for random times (between 5 and 10) + + // Create index + int dataNodeCount = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes(); + createIndex( + INDEX_NAME, + Settings.builder() + .put(remoteStoreIndexSettings(dataNodeCount - 1, 1)) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1) + .build() + ); + ensureGreen(INDEX_NAME); + + // Manually invoke a refresh + refresh(INDEX_NAME); + + // Get zero state values + // Extract and assert zero state primary stats + RemoteStoreStatsResponse zeroStateResponse = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get(); + RemoteSegmentTransferTracker.Stats zeroStatePrimaryStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats()) + .filter(remoteStoreStats -> remoteStoreStats.getShardRouting().primary()) + .collect(Collectors.toList()) + .get(0) + .getStats(); + assertTrue( + zeroStatePrimaryStats.totalUploadsStarted == zeroStatePrimaryStats.totalUploadsSucceeded + && zeroStatePrimaryStats.totalUploadsSucceeded == 1 + ); + assertTrue( + zeroStatePrimaryStats.uploadBytesStarted == 
zeroStatePrimaryStats.uploadBytesSucceeded + && zeroStatePrimaryStats.uploadBytesSucceeded > 0 + ); + assertTrue(zeroStatePrimaryStats.totalUploadsFailed == 0 && zeroStatePrimaryStats.uploadBytesFailed == 0); + + // Extract and assert zero state replica stats + List zeroStateReplicaStats = Arrays.stream(zeroStateResponse.getRemoteStoreStats()) + .filter(remoteStoreStats -> !remoteStoreStats.getShardRouting().primary()) + .collect(Collectors.toList()); + zeroStateReplicaStats.forEach(stats -> { + assertTrue( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesStarted == 0 + && stats.getStats().directoryFileTransferTrackerStats.transferredBytesSucceeded == 0 + ); + }); + + int currentNodesInCluster = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes(); + for (int i = 0; i < randomIntBetween(5, 10); i++) { + indexSingleDoc(INDEX_NAME); + // Running Flush & Refresh manually + flushAndRefresh(INDEX_NAME); + + assertBusy(() -> { + RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get(); + assertEquals(currentNodesInCluster, response.getSuccessfulShards()); + long uploadsStarted = 0, uploadsSucceeded = 0, uploadsFailed = 0; + long uploadBytesStarted = 0, uploadBytesSucceeded = 0, uploadBytesFailed = 0; + List downloadBytesStarted = new ArrayList<>(), downloadBytesSucceeded = new ArrayList<>(), downloadBytesFailed = + new ArrayList<>(); + + // Assert that stats for primary shard and replica shard set are equal + for (RemoteStoreStats eachStatsObject : response.getRemoteStoreStats()) { + RemoteSegmentTransferTracker.Stats stats = eachStatsObject.getStats(); + if (eachStatsObject.getShardRouting().primary()) { + uploadBytesStarted = stats.uploadBytesStarted; + uploadBytesSucceeded = stats.uploadBytesSucceeded; + uploadBytesFailed = stats.uploadBytesFailed; + } else { + downloadBytesStarted.add(stats.directoryFileTransferTrackerStats.transferredBytesStarted); + downloadBytesSucceeded.add(stats.directoryFileTransferTrackerStats.transferredBytesSucceeded); + downloadBytesFailed.add(stats.directoryFileTransferTrackerStats.transferredBytesFailed); + } + } + + assertEquals(0, uploadsFailed); + assertEquals(0, uploadBytesFailed); + for (int j = 0; j < response.getSuccessfulShards() - 1; j++) { + assertEquals(uploadBytesStarted - zeroStatePrimaryStats.uploadBytesStarted, (long) downloadBytesStarted.get(j)); + assertEquals(uploadBytesSucceeded - zeroStatePrimaryStats.uploadBytesSucceeded, (long) downloadBytesSucceeded.get(j)); + assertEquals(0, (long) downloadBytesFailed.get(j)); + } + }, 60, TimeUnit.SECONDS); } } + public void testStatsOnShardRelocation() { + // Scenario: + // - Create index with single primary and single replica shard + // - Index documents + // - Reroute replica shard to one of the remaining nodes + // - Assert that remote store stats reflects the new node ID + + // Create index + createIndex(INDEX_NAME, remoteStoreIndexSettings(1, 1)); + ensureGreen(INDEX_NAME); + // Index docs + indexDocs(); + + // Fetch current set of nodes in the cluster + List currentNodesInCluster = getClusterState().nodes() + .getDataNodes() + .values() + .stream() + .map(DiscoveryNode::getId) + .collect(Collectors.toList()); + DiscoveryNode[] discoveryNodesForIndex = client().admin().cluster().prepareSearchShards(INDEX_NAME).get().getNodes(); + + // Fetch nodes with shard copies of the created index + List nodeIdsWithShardCopies = new ArrayList<>(); + Arrays.stream(discoveryNodesForIndex).forEach(eachNode -> 
nodeIdsWithShardCopies.add(eachNode.getId())); + + // Fetch nodes which does not have any copies of the index + List nodeIdsWithoutShardCopy = currentNodesInCluster.stream() + .filter(eachNode -> !nodeIdsWithShardCopies.contains(eachNode)) + .collect(Collectors.toList()); + assertEquals(1, nodeIdsWithoutShardCopy.size()); + + // Manually reroute shard to a node which does not have any shard copy at present + ShardRouting replicaShardRouting = getClusterState().routingTable() + .index(INDEX_NAME) + .shard(0) + .assignedShards() + .stream() + .filter(shard -> !shard.primary()) + .collect(Collectors.toList()) + .get(0); + String sourceNode = replicaShardRouting.currentNodeId(); + String destinationNode = nodeIdsWithoutShardCopy.get(0); + relocateShard(0, sourceNode, destinationNode); + RemoteStoreStats[] allShardsStats = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get().getRemoteStoreStats(); + RemoteStoreStats replicaShardStat = Arrays.stream(allShardsStats) + .filter(eachStat -> !eachStat.getShardRouting().primary()) + .collect(Collectors.toList()) + .get(0); + + // Assert that remote store stats reflect the new shard state + assertEquals(ShardRoutingState.STARTED, replicaShardStat.getShardRouting().state()); + assertEquals(destinationNode, replicaShardStat.getShardRouting().currentNodeId()); + } + + public void testStatsOnShardUnassigned() throws IOException { + // Scenario: + // - Create index with single primary and two replica shard + // - Index documents + // - Stop one data node + // - Assert: + // a. Total shard Count in the response object is equal to the previous node count + // b. Successful shard count in the response object is equal to the new node count + createIndex(INDEX_NAME, remoteStoreIndexSettings(2, 1)); + ensureGreen(INDEX_NAME); + indexDocs(); + int dataNodeCountBeforeStop = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes(); + internalCluster().stopRandomDataNode(); + RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get(); + int dataNodeCountAfterStop = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes(); + assertEquals(dataNodeCountBeforeStop, response.getTotalShards()); + assertEquals(dataNodeCountAfterStop, response.getSuccessfulShards()); + } + + public void testStatsOnRemoteStoreRestore() throws IOException { + // Creating an index with primary shard count == total nodes in cluster and 0 replicas + int dataNodeCount = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes(); + createIndex(INDEX_NAME, remoteStoreIndexSettings(0, dataNodeCount)); + ensureGreen(INDEX_NAME); + + // Index some docs to ensure segments being uploaded to remote store + indexDocs(); + refresh(INDEX_NAME); + + // Stop one data node to force the index into a red state + internalCluster().stopRandomDataNode(); + ensureRed(INDEX_NAME); + + // Start another data node to fulfil the previously launched capacity + internalCluster().startDataOnlyNode(); + + // Restore index from remote store + assertAcked(client().admin().indices().prepareClose(INDEX_NAME)); + client().admin() + .cluster() + .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(INDEX_NAME).restoreAllShards(true), PlainActionFuture.newFuture()); + + // Ensure that the index is green + ensureGreen(INDEX_NAME); + + // Index some more docs to force segment uploads to remote store + indexDocs(); + + RemoteStoreStatsResponse remoteStoreStatsResponse = 
client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get(); + Arrays.stream(remoteStoreStatsResponse.getRemoteStoreStats()).forEach(statObject -> { + RemoteSegmentTransferTracker.Stats segmentTracker = statObject.getStats(); + // Assert that we have both upload and download stats for the index + assertTrue( + segmentTracker.totalUploadsStarted > 0 && segmentTracker.totalUploadsSucceeded > 0 && segmentTracker.totalUploadsFailed == 0 + ); + assertTrue( + segmentTracker.directoryFileTransferTrackerStats.transferredBytesStarted > 0 + && segmentTracker.directoryFileTransferTrackerStats.transferredBytesSucceeded > 0 + ); + }); + } + + public void testNonZeroPrimaryStatsOnNewlyCreatedIndexWithZeroDocs() throws Exception { + // Create an index with one primary and one replica shard + createIndex(INDEX_NAME, remoteStoreIndexSettings(1, 1)); + ensureGreen(INDEX_NAME); + refresh(INDEX_NAME); + + // Ensure that the index has 0 documents in it + assertEquals(0, client().admin().indices().prepareStats(INDEX_NAME).get().getTotal().docs.getCount()); + + // Assert that within 5 seconds the download and upload stats moves to a non-zero value + assertBusy(() -> { + RemoteStoreStats[] remoteStoreStats = client().admin() + .cluster() + .prepareRemoteStoreStats(INDEX_NAME, "0") + .get() + .getRemoteStoreStats(); + Arrays.stream(remoteStoreStats).forEach(statObject -> { + RemoteSegmentTransferTracker.Stats segmentTracker = statObject.getStats(); + if (statObject.getShardRouting().primary()) { + assertTrue( + segmentTracker.totalUploadsSucceeded == 1 + && segmentTracker.totalUploadsStarted == segmentTracker.totalUploadsSucceeded + && segmentTracker.totalUploadsFailed == 0 + ); + } else { + assertTrue( + segmentTracker.directoryFileTransferTrackerStats.transferredBytesStarted == 0 + && segmentTracker.directoryFileTransferTrackerStats.transferredBytesSucceeded == 0 + ); + } + }); + }, 5, TimeUnit.SECONDS); + } + private void indexDocs() { - // Indexing documents along with refreshes and flushes. 
for (int i = 0; i < randomIntBetween(5, 10); i++) { if (randomBoolean()) { flush(INDEX_NAME); } else { refresh(INDEX_NAME); } - int numberOfOperations = randomIntBetween(20, 50); + int numberOfOperations = randomIntBetween(10, 30); for (int j = 0; j < numberOfOperations; j++) { indexSingleDoc(INDEX_NAME); } } } - private void assertResponseStats(RemoteRefreshSegmentTracker.Stats stats) { + private void changeReplicaCountAndEnsureGreen(int replicaCount) { + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings(INDEX_NAME) + .setSettings(Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, replicaCount)) + ); + ensureYellowAndNoInitializingShards(INDEX_NAME); + ensureGreen(INDEX_NAME); + } + + private void relocateShard(int shardId, String sourceNode, String destNode) { + assertAcked(client().admin().cluster().prepareReroute().add(new MoveAllocationCommand(INDEX_NAME, shardId, sourceNode, destNode))); + ensureGreen(INDEX_NAME); + } + + private void validateUploadStats(RemoteSegmentTransferTracker.Stats stats) { assertEquals(0, stats.refreshTimeLagMs); assertEquals(stats.localRefreshNumber, stats.remoteRefreshNumber); assertTrue(stats.uploadBytesStarted > 0); @@ -143,4 +574,32 @@ private void assertResponseStats(RemoteRefreshSegmentTracker.Stats stats) { assertTrue(stats.uploadBytesPerSecMovingAverage > 0); assertTrue(stats.uploadTimeMovingAverage > 0); } + + private void validateDownloadStats(RemoteSegmentTransferTracker.Stats stats) { + assertTrue(stats.directoryFileTransferTrackerStats.lastTransferTimestampMs > 0); + assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesStarted > 0); + assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesSucceeded > 0); + assertEquals(stats.directoryFileTransferTrackerStats.transferredBytesFailed, 0); + assertTrue(stats.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes > 0); + assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesMovingAverage > 0); + assertTrue(stats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage > 0); + } + + // Validate if the shardRouting obtained from cluster state contains the exact same routing object + // parameters as obtained from the remote store stats API + private void validateShardRouting(ShardRouting routing) { + Stream currentRoutingTable = getClusterState().routingTable() + .getIndicesRouting() + .get(INDEX_NAME) + .shard(routing.id()) + .assignedShards() + .stream(); + assertTrue( + currentRoutingTable.anyMatch( + r -> (r.currentNodeId().equals(routing.currentNodeId()) + && r.state().equals(routing.state()) + && r.primary() == routing.primary()) + ) + ); + } } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java index 5ac9c1cf5f74c..6b4c9a26ab19b 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStats.java @@ -11,9 +11,10 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import 
org.opensearch.index.remote.RemoteSegmentTransferTracker; import java.io.IOException; @@ -24,72 +25,128 @@ */ public class RemoteStoreStats implements Writeable, ToXContentFragment { - private final RemoteRefreshSegmentTracker.Stats remoteSegmentUploadShardStats; + private final RemoteSegmentTransferTracker.Stats remoteSegmentShardStats; - public RemoteStoreStats(RemoteRefreshSegmentTracker.Stats remoteSegmentUploadShardStats) { - this.remoteSegmentUploadShardStats = remoteSegmentUploadShardStats; + private final ShardRouting shardRouting; + + public RemoteStoreStats(RemoteSegmentTransferTracker.Stats remoteSegmentUploadShardStats, ShardRouting shardRouting) { + this.remoteSegmentShardStats = remoteSegmentUploadShardStats; + this.shardRouting = shardRouting; } public RemoteStoreStats(StreamInput in) throws IOException { - remoteSegmentUploadShardStats = in.readOptionalWriteable(RemoteRefreshSegmentTracker.Stats::new); + this.remoteSegmentShardStats = in.readOptionalWriteable(RemoteSegmentTransferTracker.Stats::new); + this.shardRouting = new ShardRouting(in); + } + + public RemoteSegmentTransferTracker.Stats getStats() { + return remoteSegmentShardStats; } - public RemoteRefreshSegmentTracker.Stats getStats() { - return remoteSegmentUploadShardStats; + public ShardRouting getShardRouting() { + return shardRouting; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject() - .field(Fields.SHARD_ID, remoteSegmentUploadShardStats.shardId) - .field(Fields.LOCAL_REFRESH_TIMESTAMP, remoteSegmentUploadShardStats.localRefreshClockTimeMs) - .field(Fields.REMOTE_REFRESH_TIMESTAMP, remoteSegmentUploadShardStats.remoteRefreshClockTimeMs) - .field(Fields.REFRESH_TIME_LAG_IN_MILLIS, remoteSegmentUploadShardStats.refreshTimeLagMs) - .field(Fields.REFRESH_LAG, remoteSegmentUploadShardStats.localRefreshNumber - remoteSegmentUploadShardStats.remoteRefreshNumber) - .field(Fields.BYTES_LAG, remoteSegmentUploadShardStats.bytesLag) - - .field(Fields.BACKPRESSURE_REJECTION_COUNT, remoteSegmentUploadShardStats.rejectionCount) - .field(Fields.CONSECUTIVE_FAILURE_COUNT, remoteSegmentUploadShardStats.consecutiveFailuresCount); - - builder.startObject(Fields.TOTAL_REMOTE_REFRESH); - builder.field(SubFields.STARTED, remoteSegmentUploadShardStats.totalUploadsStarted) - .field(SubFields.SUCCEEDED, remoteSegmentUploadShardStats.totalUploadsSucceeded) - .field(SubFields.FAILED, remoteSegmentUploadShardStats.totalUploadsFailed); + builder.startObject(); + buildShardRouting(builder); + builder.startObject(Fields.SEGMENT); + builder.startObject(SubFields.DOWNLOAD); + // Ensuring that we are not showing 0 metrics to the user + if (remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesStarted != 0) { + buildDownloadStats(builder); + } + builder.endObject(); + builder.startObject(SubFields.UPLOAD); + // Ensuring that we are not showing 0 metrics to the user + if (remoteSegmentShardStats.totalUploadsStarted != 0) { + buildUploadStats(builder); + } builder.endObject(); - - builder.startObject(Fields.TOTAL_UPLOADS_IN_BYTES); - builder.field(SubFields.STARTED, remoteSegmentUploadShardStats.uploadBytesStarted) - .field(SubFields.SUCCEEDED, remoteSegmentUploadShardStats.uploadBytesSucceeded) - .field(SubFields.FAILED, remoteSegmentUploadShardStats.uploadBytesFailed); builder.endObject(); + return builder.endObject(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + 
out.writeOptionalWriteable(remoteSegmentShardStats); + shardRouting.writeTo(out); + } - builder.startObject(Fields.REMOTE_REFRESH_SIZE_IN_BYTES); - builder.field(SubFields.LAST_SUCCESSFUL, remoteSegmentUploadShardStats.lastSuccessfulRemoteRefreshBytes); - builder.field(SubFields.MOVING_AVG, remoteSegmentUploadShardStats.uploadBytesMovingAverage); + private void buildUploadStats(XContentBuilder builder) throws IOException { + builder.field(UploadStatsFields.LOCAL_REFRESH_TIMESTAMP, remoteSegmentShardStats.localRefreshClockTimeMs) + .field(UploadStatsFields.REMOTE_REFRESH_TIMESTAMP, remoteSegmentShardStats.remoteRefreshClockTimeMs) + .field(UploadStatsFields.REFRESH_TIME_LAG_IN_MILLIS, remoteSegmentShardStats.refreshTimeLagMs) + .field(UploadStatsFields.REFRESH_LAG, remoteSegmentShardStats.localRefreshNumber - remoteSegmentShardStats.remoteRefreshNumber) + .field(UploadStatsFields.BYTES_LAG, remoteSegmentShardStats.bytesLag) + .field(UploadStatsFields.BACKPRESSURE_REJECTION_COUNT, remoteSegmentShardStats.rejectionCount) + .field(UploadStatsFields.CONSECUTIVE_FAILURE_COUNT, remoteSegmentShardStats.consecutiveFailuresCount); + builder.startObject(UploadStatsFields.TOTAL_SYNCS_TO_REMOTE) + .field(SubFields.STARTED, remoteSegmentShardStats.totalUploadsStarted) + .field(SubFields.SUCCEEDED, remoteSegmentShardStats.totalUploadsSucceeded) + .field(SubFields.FAILED, remoteSegmentShardStats.totalUploadsFailed); builder.endObject(); + builder.startObject(UploadStatsFields.TOTAL_UPLOADS_IN_BYTES) + .field(SubFields.STARTED, remoteSegmentShardStats.uploadBytesStarted) + .field(SubFields.SUCCEEDED, remoteSegmentShardStats.uploadBytesSucceeded) + .field(SubFields.FAILED, remoteSegmentShardStats.uploadBytesFailed); + builder.endObject(); + builder.startObject(UploadStatsFields.REMOTE_REFRESH_SIZE_IN_BYTES) + .field(SubFields.LAST_SUCCESSFUL, remoteSegmentShardStats.lastSuccessfulRemoteRefreshBytes) + .field(SubFields.MOVING_AVG, remoteSegmentShardStats.uploadBytesMovingAverage); + builder.endObject(); + builder.startObject(UploadStatsFields.UPLOAD_LATENCY_IN_BYTES_PER_SEC) + .field(SubFields.MOVING_AVG, remoteSegmentShardStats.uploadBytesPerSecMovingAverage); + builder.endObject(); + builder.startObject(UploadStatsFields.REMOTE_REFRESH_LATENCY_IN_MILLIS) + .field(SubFields.MOVING_AVG, remoteSegmentShardStats.uploadTimeMovingAverage); + builder.endObject(); + } - builder.startObject(Fields.UPLOAD_LATENCY_IN_BYTES_PER_SEC); - builder.field(SubFields.MOVING_AVG, remoteSegmentUploadShardStats.uploadBytesPerSecMovingAverage); + private void buildDownloadStats(XContentBuilder builder) throws IOException { + builder.field( + DownloadStatsFields.LAST_SYNC_TIMESTAMP, + remoteSegmentShardStats.directoryFileTransferTrackerStats.lastTransferTimestampMs + ); + builder.startObject(DownloadStatsFields.TOTAL_DOWNLOADS_IN_BYTES) + .field(SubFields.STARTED, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesStarted) + .field(SubFields.SUCCEEDED, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesSucceeded) + .field(SubFields.FAILED, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesFailed); builder.endObject(); - builder.startObject(Fields.REMOTE_REFRESH_LATENCY_IN_MILLIS); - builder.field(SubFields.MOVING_AVG, remoteSegmentUploadShardStats.uploadTimeMovingAverage); + builder.startObject(DownloadStatsFields.DOWNLOAD_SIZE_IN_BYTES) + .field(SubFields.LAST_SUCCESSFUL, remoteSegmentShardStats.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes) + 
.field(SubFields.MOVING_AVG, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesMovingAverage); builder.endObject(); + builder.startObject(DownloadStatsFields.DOWNLOAD_SPEED_IN_BYTES_PER_SEC) + .field(SubFields.MOVING_AVG, remoteSegmentShardStats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage); builder.endObject(); + } - return builder; + private void buildShardRouting(XContentBuilder builder) throws IOException { + builder.startObject(Fields.ROUTING); + builder.field(RoutingFields.STATE, shardRouting.state()); + builder.field(RoutingFields.PRIMARY, shardRouting.primary()); + builder.field(RoutingFields.NODE_ID, shardRouting.currentNodeId()); + builder.endObject(); } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalWriteable(remoteSegmentUploadShardStats); + static final class Fields { + static final String ROUTING = "routing"; + static final String SEGMENT = "segment"; + static final String TRANSLOG = "translog"; + } + + static final class RoutingFields { + static final String STATE = "state"; + static final String PRIMARY = "primary"; + static final String NODE_ID = "node"; } /** * Fields for remote store stats response */ - static final class Fields { - static final String SHARD_ID = "shard_id"; - + static final class UploadStatsFields { /** * Lag in terms of bytes b/w local and remote store */ @@ -128,7 +185,7 @@ static final class Fields { /** * Represents the number of remote refreshes */ - static final String TOTAL_REMOTE_REFRESH = "total_remote_refresh"; + static final String TOTAL_SYNCS_TO_REMOTE = "total_syncs_to_remote"; /** * Represents the total uploads to remote store in bytes @@ -151,21 +208,46 @@ static final class Fields { static final String REMOTE_REFRESH_LATENCY_IN_MILLIS = "remote_refresh_latency_in_millis"; } + static final class DownloadStatsFields { + /** + * Last successful sync from remote in milliseconds + */ + static final String LAST_SYNC_TIMESTAMP = "last_sync_timestamp"; + + /** + * Total bytes of segment files downloaded from the remote store for a specific shard + */ + static final String TOTAL_DOWNLOADS_IN_BYTES = "total_downloads_in_bytes"; + + /** + * Size of each segment file downloaded from the remote store + */ + static final String DOWNLOAD_SIZE_IN_BYTES = "download_size_in_bytes"; + + /** + * Speed (in bytes/sec) for segment file downloads + */ + static final String DOWNLOAD_SPEED_IN_BYTES_PER_SEC = "download_speed_in_bytes_per_sec"; + } + /** - * Reusable sub fields for {@link Fields} + * Reusable sub fields for {@link UploadStatsFields} and {@link DownloadStatsFields} */ static final class SubFields { static final String STARTED = "started"; static final String SUCCEEDED = "succeeded"; static final String FAILED = "failed"; + static final String DOWNLOAD = "download"; + static final String UPLOAD = "upload"; + /** - * Moving avg over last N values stat for a {@link Fields} + * Moving avg over last N values stat */ static final String MOVING_AVG = "moving_avg"; /** - * Most recent successful attempt stat for a {@link Fields} + * Most recent successful attempt stat */ static final String LAST_SUCCESSFUL = "last_successful"; } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java index 20023e30a271e..f6613c1d2ac50 100644 --- 
a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java @@ -17,7 +17,10 @@ import org.opensearch.core.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; /** * Remote Store stats response @@ -26,49 +29,71 @@ */ public class RemoteStoreStatsResponse extends BroadcastResponse { - private final RemoteStoreStats[] shards; + private final RemoteStoreStats[] remoteStoreStats; public RemoteStoreStatsResponse(StreamInput in) throws IOException { super(in); - shards = in.readArray(RemoteStoreStats::new, RemoteStoreStats[]::new); + remoteStoreStats = in.readArray(RemoteStoreStats::new, RemoteStoreStats[]::new); } public RemoteStoreStatsResponse( - RemoteStoreStats[] shards, + RemoteStoreStats[] remoteStoreStats, int totalShards, int successfulShards, int failedShards, List shardFailures ) { super(totalShards, successfulShards, failedShards, shardFailures); - this.shards = shards; + this.remoteStoreStats = remoteStoreStats; } - public RemoteStoreStats[] getShards() { - return this.shards; + public RemoteStoreStats[] getRemoteStoreStats() { + return this.remoteStoreStats; } - public RemoteStoreStats getAt(int position) { - return shards[position]; + public Map>> groupByIndexAndShards() { + Map>> indexWiseStats = new HashMap<>(); + for (RemoteStoreStats shardStat : remoteStoreStats) { + indexWiseStats.computeIfAbsent(shardStat.getShardRouting().getIndexName(), k -> new HashMap<>()) + .computeIfAbsent(shardStat.getShardRouting().getId(), k -> new ArrayList<>()) + .add(shardStat); + } + return indexWiseStats; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeArray(shards); + out.writeArray(remoteStoreStats); } @Override protected void addCustomXContentFields(XContentBuilder builder, Params params) throws IOException { - builder.startArray("stats"); - for (RemoteStoreStats shard : shards) { - shard.toXContent(builder, params); + Map>> indexWiseStats = groupByIndexAndShards(); + builder.startObject(Fields.INDICES); + for (String indexName : indexWiseStats.keySet()) { + builder.startObject(indexName); + builder.startObject(Fields.SHARDS); + for (int shardId : indexWiseStats.get(indexName).keySet()) { + builder.startArray(Integer.toString(shardId)); + for (RemoteStoreStats shardStat : indexWiseStats.get(indexName).get(shardId)) { + shardStat.toXContent(builder, params); + } + builder.endArray(); + } + builder.endObject(); + builder.endObject(); } - builder.endArray(); + builder.endObject(); } @Override public String toString() { return Strings.toString(XContentType.JSON, this, true, false); } + + static final class Fields { + static final String SHARDS = "shards"; + static final String INDICES = "indices"; + } } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java index 434abd1207f50..37835a5add3d6 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsAction.java @@ -24,7 +24,7 @@ import org.opensearch.core.common.io.stream.StreamInput; 
import org.opensearch.index.IndexService; import org.opensearch.index.remote.RemoteRefreshSegmentPressureService; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.ShardNotFoundException; import org.opensearch.indices.IndicesService; @@ -49,6 +49,7 @@ public class TransportRemoteStoreStatsAction extends TransportBroadcastByNodeAct RemoteStoreStats> { private final IndicesService indicesService; + private final RemoteRefreshSegmentPressureService remoteRefreshSegmentPressureService; @Inject @@ -95,7 +96,6 @@ protected ShardsIterator shards(ClusterState clusterState, RemoteStoreStatsReque || (shardRouting.currentNodeId() == null || shardRouting.currentNodeId().equals(clusterState.getNodes().getLocalNodeId())) ) - .filter(ShardRouting::primary) .filter( shardRouting -> Boolean.parseBoolean( clusterState.getMetadata().index(shardRouting.index()).getSettings().get(IndexMetadata.SETTING_REMOTE_STORE_ENABLED) @@ -153,11 +153,10 @@ protected RemoteStoreStats shardOperation(RemoteStoreStatsRequest request, Shard throw new ShardNotFoundException(indexShard.shardId()); } - RemoteRefreshSegmentTracker remoteRefreshSegmentTracker = remoteRefreshSegmentPressureService.getRemoteRefreshSegmentTracker( + RemoteSegmentTransferTracker remoteSegmentTransferTracker = remoteRefreshSegmentPressureService.getRemoteRefreshSegmentTracker( indexShard.shardId() ); - assert Objects.nonNull(remoteRefreshSegmentTracker); - - return new RemoteStoreStats(remoteRefreshSegmentTracker.stats()); + assert Objects.nonNull(remoteSegmentTransferTracker); + return new RemoteStoreStats(remoteSegmentTransferTracker.stats(), indexShard.routingEntry()); } } diff --git a/server/src/main/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureService.java b/server/src/main/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureService.java index 3f1161f0c5e03..6f6364ac3b8a6 100644 --- a/server/src/main/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureService.java +++ b/server/src/main/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureService.java @@ -26,7 +26,7 @@ import java.util.function.BiConsumer; /** - * Service used to validate if the incoming indexing request should be rejected based on the {@link RemoteRefreshSegmentTracker}. + * Service used to validate if the incoming indexing request should be rejected based on the {@link RemoteSegmentTransferTracker}. * * @opensearch.internal */ @@ -37,7 +37,7 @@ public class RemoteRefreshSegmentPressureService implements IndexEventListener { /** * Keeps map of remote-backed index shards and their corresponding backpressure tracker. */ - private final Map trackerMap = ConcurrentCollections.newConcurrentMap(); + private final Map trackerMap = ConcurrentCollections.newConcurrentMap(); /** * Remote refresh segment pressure settings which is used for creation of the backpressure tracker and as well as rejection. @@ -57,12 +57,12 @@ public RemoteRefreshSegmentPressureService(ClusterService clusterService, Settin } /** - * Get {@code RemoteRefreshSegmentTracker} only if the underlying Index has remote segments integration enabled. + * Get {@code RemoteSegmentTransferTracker} only if the underlying Index has remote segments integration enabled. * * @param shardId shard id * @return the tracker if index is remote-backed, else null. 
*/ - public RemoteRefreshSegmentTracker getRemoteRefreshSegmentTracker(ShardId shardId) { + public RemoteSegmentTransferTracker getRemoteRefreshSegmentTracker(ShardId shardId) { return trackerMap.get(shardId); } @@ -74,8 +74,9 @@ public void afterIndexShardCreated(IndexShard indexShard) { ShardId shardId = indexShard.shardId(); trackerMap.put( shardId, - new RemoteRefreshSegmentTracker( + new RemoteSegmentTransferTracker( shardId, + indexShard.store().getDirectoryFileTransferTracker(), pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -86,8 +87,8 @@ public void afterIndexShardCreated(IndexShard indexShard) { @Override public void afterIndexShardClosed(ShardId shardId, IndexShard indexShard, Settings indexSettings) { - RemoteRefreshSegmentTracker remoteRefreshSegmentTracker = trackerMap.remove(shardId); - if (remoteRefreshSegmentTracker != null) { + RemoteSegmentTransferTracker remoteSegmentTransferTracker = trackerMap.remove(shardId); + if (remoteSegmentTransferTracker != null) { logger.trace("Deleted tracker for shardId={}", shardId); } } @@ -107,34 +108,34 @@ public boolean isSegmentsUploadBackpressureEnabled() { * @param shardId shardId for which the validation needs to be done. */ public void validateSegmentsUploadLag(ShardId shardId) { - RemoteRefreshSegmentTracker remoteRefreshSegmentTracker = getRemoteRefreshSegmentTracker(shardId); + RemoteSegmentTransferTracker remoteSegmentTransferTracker = getRemoteRefreshSegmentTracker(shardId); // condition 1 - This will be null for non-remote backed indexes // condition 2 - This will be zero if the remote store is - if (remoteRefreshSegmentTracker == null || remoteRefreshSegmentTracker.getRefreshSeqNoLag() == 0) { + if (remoteSegmentTransferTracker == null || remoteSegmentTransferTracker.getRefreshSeqNoLag() == 0) { return; } for (LagValidator lagValidator : lagValidators) { - if (lagValidator.validate(remoteRefreshSegmentTracker, shardId) == false) { - remoteRefreshSegmentTracker.incrementRejectionCount(lagValidator.name()); - throw new OpenSearchRejectedExecutionException(lagValidator.rejectionMessage(remoteRefreshSegmentTracker, shardId)); + if (lagValidator.validate(remoteSegmentTransferTracker, shardId) == false) { + remoteSegmentTransferTracker.incrementRejectionCount(lagValidator.name()); + throw new OpenSearchRejectedExecutionException(lagValidator.rejectionMessage(remoteSegmentTransferTracker, shardId)); } } } void updateUploadBytesMovingAverageWindowSize(int updatedSize) { - updateMovingAverageWindowSize(RemoteRefreshSegmentTracker::updateUploadBytesMovingAverageWindowSize, updatedSize); + updateMovingAverageWindowSize(RemoteSegmentTransferTracker::updateUploadBytesMovingAverageWindowSize, updatedSize); } void updateUploadBytesPerSecMovingAverageWindowSize(int updatedSize) { - updateMovingAverageWindowSize(RemoteRefreshSegmentTracker::updateUploadBytesPerSecMovingAverageWindowSize, updatedSize); + updateMovingAverageWindowSize(RemoteSegmentTransferTracker::updateUploadBytesPerSecMovingAverageWindowSize, updatedSize); } void updateUploadTimeMsMovingAverageWindowSize(int updatedSize) { - updateMovingAverageWindowSize(RemoteRefreshSegmentTracker::updateUploadTimeMsMovingAverageWindowSize, updatedSize); + updateMovingAverageWindowSize(RemoteSegmentTransferTracker::updateUploadTimeMsMovingAverageWindowSize, updatedSize); } - void updateMovingAverageWindowSize(BiConsumer biConsumer, int updatedSize) { + void 
updateMovingAverageWindowSize(BiConsumer biConsumer, int updatedSize) { trackerMap.values().forEach(tracker -> biConsumer.accept(tracker, updatedSize)); } @@ -158,7 +159,7 @@ private LagValidator(RemoteRefreshSegmentPressureSettings pressureSettings) { * @param shardId shard id of the {@code IndexShard} currently being validated. * @return true if successfully validated that lag is acceptable. */ - abstract boolean validate(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId); + abstract boolean validate(RemoteSegmentTransferTracker pressureTracker, ShardId shardId); /** * Returns the name of the lag validator. @@ -167,7 +168,7 @@ private LagValidator(RemoteRefreshSegmentPressureSettings pressureSettings) { */ abstract String name(); - abstract String rejectionMessage(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId); + abstract String rejectionMessage(RemoteSegmentTransferTracker pressureTracker, ShardId shardId); } /** @@ -184,7 +185,7 @@ private BytesLagValidator(RemoteRefreshSegmentPressureSettings pressureSettings) } @Override - public boolean validate(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId) { + public boolean validate(RemoteSegmentTransferTracker pressureTracker, ShardId shardId) { if (pressureTracker.getRefreshSeqNoLag() <= 1) { return true; } @@ -198,7 +199,7 @@ public boolean validate(RemoteRefreshSegmentTracker pressureTracker, ShardId sha } @Override - public String rejectionMessage(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId) { + public String rejectionMessage(RemoteSegmentTransferTracker pressureTracker, ShardId shardId) { double dynamicBytesLagThreshold = pressureTracker.getUploadBytesAverage() * pressureSettings.getBytesLagVarianceFactor(); return String.format( Locale.ROOT, @@ -230,7 +231,7 @@ private TimeLagValidator(RemoteRefreshSegmentPressureSettings pressureSettings) } @Override - public boolean validate(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId) { + public boolean validate(RemoteSegmentTransferTracker pressureTracker, ShardId shardId) { if (pressureTracker.getRefreshSeqNoLag() <= 1) { return true; } @@ -244,7 +245,7 @@ public boolean validate(RemoteRefreshSegmentTracker pressureTracker, ShardId sha } @Override - public String rejectionMessage(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId) { + public String rejectionMessage(RemoteSegmentTransferTracker pressureTracker, ShardId shardId) { double dynamicTimeLagThreshold = pressureTracker.getUploadTimeMsAverage() * pressureSettings.getUploadTimeLagVarianceFactor(); return String.format( Locale.ROOT, @@ -276,14 +277,14 @@ private ConsecutiveFailureValidator(RemoteRefreshSegmentPressureSettings pressur } @Override - public boolean validate(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId) { + public boolean validate(RemoteSegmentTransferTracker pressureTracker, ShardId shardId) { int failureStreakCount = pressureTracker.getConsecutiveFailureCount(); int minConsecutiveFailureThreshold = pressureSettings.getMinConsecutiveFailuresLimit(); return failureStreakCount <= minConsecutiveFailureThreshold; } @Override - public String rejectionMessage(RemoteRefreshSegmentTracker pressureTracker, ShardId shardId) { + public String rejectionMessage(RemoteSegmentTransferTracker pressureTracker, ShardId shardId) { return String.format( Locale.ROOT, "rejected execution on primary shard:%s due to remote segments lagging behind local segments." 
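/*
 * Illustrative sketch (hypothetical names, not part of this patch): the backpressure flow in
 * RemoteRefreshSegmentPressureService above boils down to iterating a set of lag validators over a
 * per-shard transfer tracker and rejecting the request on the first failed check. The classes below
 * (SimpleTracker, LagCheck, RejectedException, BackpressureSketch) exist only to show that pattern
 * in a self-contained form; thresholds and messages are made up for the example.
 */
import java.util.List;

class RejectedException extends RuntimeException {
    RejectedException(String message) {
        super(message);
    }
}

/** Hypothetical per-shard tracker holding only the values the checks need. */
class SimpleTracker {
    long refreshSeqNoLag; // local refresh seq-no minus remote refresh seq-no
    long bytesLag;        // bytes not yet uploaded to the remote store
    int rejectionCount;   // incremented each time a check rejects a request
}

/** Hypothetical lag check mirroring the validate()/rejectionMessage() pair. */
interface LagCheck {
    boolean validate(SimpleTracker tracker);

    String rejectionMessage(SimpleTracker tracker);
}

class BackpressureSketch {
    // A single bytes-lag style check with a fixed threshold, purely for illustration.
    private final List<LagCheck> checks = List.of(new LagCheck() {
        public boolean validate(SimpleTracker t) {
            return t.bytesLag < 4_000_000L;
        }

        public String rejectionMessage(SimpleTracker t) {
            return "rejected execution: bytes_lag=" + t.bytesLag;
        }
    });

    void validateSegmentsUploadLag(SimpleTracker tracker) {
        if (tracker == null || tracker.refreshSeqNoLag == 0) {
            return; // non-remote index or nothing pending, nothing to validate
        }
        for (LagCheck check : checks) {
            if (check.validate(tracker) == false) {
                tracker.rejectionCount++;
                throw new RejectedException(check.rejectionMessage(tracker));
            }
        }
    }
}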
diff --git a/server/src/main/java/org/opensearch/index/remote/RemoteRefreshSegmentTracker.java b/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java similarity index 94% rename from server/src/main/java/org/opensearch/index/remote/RemoteRefreshSegmentTracker.java rename to server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java index 332b0d1698800..cd5d461584f0f 100644 --- a/server/src/main/java/org/opensearch/index/remote/RemoteRefreshSegmentTracker.java +++ b/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java @@ -15,6 +15,7 @@ import org.opensearch.common.util.Streak; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.store.DirectoryFileTransferTracker; import java.io.IOException; import java.util.HashMap; @@ -30,7 +31,7 @@ * * @opensearch.internal */ -public class RemoteRefreshSegmentTracker { +public class RemoteSegmentTransferTracker { /** * ShardId for which this instance tracks the remote segment upload metadata. @@ -169,8 +170,14 @@ public class RemoteRefreshSegmentTracker { private final Object uploadTimeMsMutex = new Object(); - public RemoteRefreshSegmentTracker( + /** + * {@link org.opensearch.index.store.Store.StoreDirectory} level file transfer tracker, used to show download stats + */ + private final DirectoryFileTransferTracker directoryFileTransferTracker; + + public RemoteSegmentTransferTracker( ShardId shardId, + DirectoryFileTransferTracker directoryFileTransferTracker, int uploadBytesMovingAverageWindowSize, int uploadBytesPerSecMovingAverageWindowSize, int uploadTimeMsMovingAverageWindowSize @@ -188,6 +195,7 @@ public RemoteRefreshSegmentTracker( uploadTimeMsMovingAverageReference = new AtomicReference<>(new MovingAverage(uploadTimeMsMovingAverageWindowSize)); latestLocalFileNameLengthMap = new HashMap<>(); + this.directoryFileTransferTracker = directoryFileTransferTracker; } ShardId getShardId() { @@ -472,8 +480,12 @@ void updateUploadTimeMsMovingAverageWindowSize(int updatedSize) { } } - public RemoteRefreshSegmentTracker.Stats stats() { - return new RemoteRefreshSegmentTracker.Stats( + public DirectoryFileTransferTracker getDirectoryFileTransferTracker() { + return directoryFileTransferTracker; + } + + public RemoteSegmentTransferTracker.Stats stats() { + return new RemoteSegmentTransferTracker.Stats( shardId, localRefreshClockTimeMs, remoteRefreshClockTimeMs, @@ -492,7 +504,8 @@ public RemoteRefreshSegmentTracker.Stats stats() { uploadBytesMovingAverageReference.get().getAverage(), uploadBytesPerSecMovingAverageReference.get().getAverage(), uploadTimeMsMovingAverageReference.get().getAverage(), - getBytesLag() + getBytesLag(), + directoryFileTransferTracker.stats() ); } @@ -522,6 +535,7 @@ public static class Stats implements Writeable { public final double uploadBytesPerSecMovingAverage; public final double uploadTimeMovingAverage; public final long bytesLag; + public final DirectoryFileTransferTracker.Stats directoryFileTransferTrackerStats; public Stats( ShardId shardId, @@ -542,7 +556,8 @@ public Stats( double uploadBytesMovingAverage, double uploadBytesPerSecMovingAverage, double uploadTimeMovingAverage, - long bytesLag + long bytesLag, + DirectoryFileTransferTracker.Stats directoryFileTransferTrackerStats ) { this.shardId = shardId; this.localRefreshClockTimeMs = localRefreshClockTimeMs; @@ -563,6 +578,7 @@ public Stats( this.uploadBytesPerSecMovingAverage = 
uploadBytesPerSecMovingAverage; this.uploadTimeMovingAverage = uploadTimeMovingAverage; this.bytesLag = bytesLag; + this.directoryFileTransferTrackerStats = directoryFileTransferTrackerStats; } public Stats(StreamInput in) throws IOException { @@ -586,6 +602,7 @@ public Stats(StreamInput in) throws IOException { this.uploadBytesPerSecMovingAverage = in.readDouble(); this.uploadTimeMovingAverage = in.readDouble(); this.bytesLag = in.readLong(); + this.directoryFileTransferTrackerStats = in.readOptionalWriteable(DirectoryFileTransferTracker.Stats::new); } catch (IOException e) { throw e; } @@ -612,7 +629,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(uploadBytesPerSecMovingAverage); out.writeDouble(uploadTimeMovingAverage); out.writeLong(bytesLag); + out.writeOptionalWriteable(directoryFileTransferTrackerStats); } } - } diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java index 2385b906a7ae5..4a70ff04770d3 100644 --- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java +++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java @@ -28,7 +28,7 @@ import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.index.engine.EngineException; import org.opensearch.index.engine.InternalEngine; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.store.RemoteSegmentStoreDirectory; import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadata; @@ -87,7 +87,7 @@ public final class RemoteStoreRefreshListener extends CloseableRetryableRefreshL private final IndexShard indexShard; private final Directory storeDirectory; private final RemoteSegmentStoreDirectory remoteDirectory; - private final RemoteRefreshSegmentTracker segmentTracker; + private final RemoteSegmentTransferTracker segmentTracker; private final Map localSegmentChecksumMap; private long primaryTerm; private volatile Iterator backoffDelayIterator; @@ -104,7 +104,7 @@ public final class RemoteStoreRefreshListener extends CloseableRetryableRefreshL public RemoteStoreRefreshListener( IndexShard indexShard, SegmentReplicationCheckpointPublisher checkpointPublisher, - RemoteRefreshSegmentTracker segmentTracker + RemoteSegmentTransferTracker segmentTracker ) { super(indexShard.getThreadPool()); logger = Loggers.getLogger(getClass(), indexShard.shardId()); diff --git a/server/src/main/java/org/opensearch/index/store/DirectoryFileTransferTracker.java b/server/src/main/java/org/opensearch/index/store/DirectoryFileTransferTracker.java new file mode 100644 index 0000000000000..7e0e231d7bad9 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/store/DirectoryFileTransferTracker.java @@ -0,0 +1,195 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.store; + +import org.opensearch.common.util.MovingAverage; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; + +import java.io.IOException; + +/** + * Tracks the amount of bytes transferred between two {@link org.apache.lucene.store.Directory} instances + * + * @opensearch.internal + */ +public class DirectoryFileTransferTracker { + /** + * Cumulative size of files (in bytes) attempted to be transferred over from the source {@link org.apache.lucene.store.Directory} + */ + private volatile long transferredBytesStarted; + + /** + * Cumulative size of files (in bytes) failed in transfer over from the source {@link org.apache.lucene.store.Directory} + */ + private volatile long transferredBytesFailed; + + /** + * Cumulative size of files (in bytes) successfully transferred over from the source {@link org.apache.lucene.store.Directory} + */ + private volatile long transferredBytesSucceeded; + + /** + * Time in milliseconds for the last successful transfer from the source {@link org.apache.lucene.store.Directory} + */ + private volatile long lastTransferTimestampMs; + + /** + * Provides moving average over the last N total size in bytes of files transferred from the source {@link org.apache.lucene.store.Directory}. + * N is window size + */ + private volatile MovingAverage transferredBytesMovingAverageReference; + + /** + * Size (in bytes) of the most recent successful file transfer from the source {@link org.apache.lucene.store.Directory} + */ + private volatile long lastSuccessfulTransferInBytes; + + /** + * Provides moving average over the last N transfer speed (in bytes/s) of segment files transferred from the source {@link org.apache.lucene.store.Directory}. + * N is window size + */ + private volatile MovingAverage transferredBytesPerSecMovingAverageReference; + + private final int DIRECTORY_FILES_TRANSFER_DEFAULT_WINDOW_SIZE = 20; + + public long getTransferredBytesStarted() { + return transferredBytesStarted; + } + + public void addTransferredBytesStarted(long size) { + transferredBytesStarted += size; + } + + public long getTransferredBytesFailed() { + return transferredBytesFailed; + } + + public void addTransferredBytesFailed(long size) { + transferredBytesFailed += size; + } + + public long getTransferredBytesSucceeded() { + return transferredBytesSucceeded; + } + + public void addTransferredBytesSucceeded(long size, long startTimeInMs) { + transferredBytesSucceeded += size; + updateLastSuccessfulTransferSize(size); + long currentTimeInMs = System.currentTimeMillis(); + updateLastTransferTimestampMs(currentTimeInMs); + long timeTakenInMS = Math.max(1, currentTimeInMs - startTimeInMs); + addTransferredBytesPerSec((size * 1_000L) / timeTakenInMS); + } + + public boolean isTransferredBytesPerSecAverageReady() { + return transferredBytesPerSecMovingAverageReference.isReady(); + } + + public double getTransferredBytesPerSecAverage() { + return transferredBytesPerSecMovingAverageReference.getAverage(); + } + + // Visible for testing + public void addTransferredBytesPerSec(long bytesPerSec) { + this.transferredBytesPerSecMovingAverageReference.record(bytesPerSec); + } + + public boolean isTransferredBytesAverageReady() { + return transferredBytesMovingAverageReference.isReady(); + } + + public double getTransferredBytesAverage() { + return transferredBytesMovingAverageReference.getAverage(); + } + + // Visible for testing + public void updateLastSuccessfulTransferSize(long size) { + lastSuccessfulTransferInBytes = size; +
this.transferredBytesMovingAverageReference.record(size); + } + + public long getLastTransferTimestampMs() { + return lastTransferTimestampMs; + } + + // Visible for testing + public void updateLastTransferTimestampMs(long downloadTimestampInMs) { + this.lastTransferTimestampMs = downloadTimestampInMs; + } + + public DirectoryFileTransferTracker() { + transferredBytesMovingAverageReference = new MovingAverage(DIRECTORY_FILES_TRANSFER_DEFAULT_WINDOW_SIZE); + transferredBytesPerSecMovingAverageReference = new MovingAverage(DIRECTORY_FILES_TRANSFER_DEFAULT_WINDOW_SIZE); + } + + public DirectoryFileTransferTracker.Stats stats() { + return new Stats( + transferredBytesStarted, + transferredBytesFailed, + transferredBytesSucceeded, + lastTransferTimestampMs, + transferredBytesMovingAverageReference.getAverage(), + lastSuccessfulTransferInBytes, + transferredBytesPerSecMovingAverageReference.getAverage() + ); + } + + /** + * Represents the tracker's stats presentable to an API. + * + * @opensearch.internal + */ + public static class Stats implements Writeable { + public final long transferredBytesStarted; + public final long transferredBytesFailed; + public final long transferredBytesSucceeded; + public final long lastTransferTimestampMs; + public final double transferredBytesMovingAverage; + public final long lastSuccessfulTransferInBytes; + public final double transferredBytesPerSecMovingAverage; + + public Stats( + long transferredBytesStarted, + long transferredBytesFailed, + long downloadBytesSucceeded, + long lastTransferTimestampMs, + double transferredBytesMovingAverage, + long lastSuccessfulTransferInBytes, + double transferredBytesPerSecMovingAverage + ) { + this.transferredBytesStarted = transferredBytesStarted; + this.transferredBytesFailed = transferredBytesFailed; + this.transferredBytesSucceeded = downloadBytesSucceeded; + this.lastTransferTimestampMs = lastTransferTimestampMs; + this.transferredBytesMovingAverage = transferredBytesMovingAverage; + this.lastSuccessfulTransferInBytes = lastSuccessfulTransferInBytes; + this.transferredBytesPerSecMovingAverage = transferredBytesPerSecMovingAverage; + } + + public Stats(StreamInput in) throws IOException { + this.transferredBytesStarted = in.readLong(); + this.transferredBytesFailed = in.readLong(); + this.transferredBytesSucceeded = in.readLong(); + this.lastTransferTimestampMs = in.readLong(); + this.transferredBytesMovingAverage = in.readDouble(); + this.lastSuccessfulTransferInBytes = in.readLong(); + this.transferredBytesPerSecMovingAverage = in.readDouble(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(transferredBytesStarted); + out.writeLong(transferredBytesFailed); + out.writeLong(transferredBytesSucceeded); + out.writeLong(lastTransferTimestampMs); + out.writeDouble(transferredBytesMovingAverage); + out.writeLong(lastSuccessfulTransferInBytes); + out.writeDouble(transferredBytesPerSecMovingAverage); + } + } +} diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index 8967100d4faf0..a67b87f58110c 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ b/server/src/main/java/org/opensearch/index/store/Store.java @@ -963,18 +963,24 @@ public void commitSegmentInfos(SegmentInfos latestSegmentInfos, long maxSeqNo, l } } + public DirectoryFileTransferTracker getDirectoryFileTransferTracker() { + return directory.getDirectoryFileTransferTracker(); + } + /** * A store directory * * 
@opensearch.internal */ static final class StoreDirectory extends FilterDirectory { - private final Logger deletesLogger; + public final DirectoryFileTransferTracker directoryFileTransferTracker; + StoreDirectory(ByteSizeCachingDirectory delegateDirectory, Logger deletesLogger) { super(delegateDirectory); this.deletesLogger = deletesLogger; + this.directoryFileTransferTracker = new DirectoryFileTransferTracker(); } /** Estimate the cumulative size of all files in this directory in bytes. */ @@ -1012,6 +1018,52 @@ public Set getPendingDeletions() throws IOException { // to be removed once fixed in FilterDirectory. return unwrap(this).getPendingDeletions(); } + + public DirectoryFileTransferTracker getDirectoryFileTransferTracker() { + return directoryFileTransferTracker; + } + + @Override + public void copyFrom(Directory from, String src, String dest, IOContext context) throws IOException { + long fileSize = from.fileLength(src); + beforeDownload(fileSize); + boolean success = false; + try { + long startTime = System.currentTimeMillis(); + super.copyFrom(from, src, dest, context); + success = true; + afterDownload(fileSize, startTime); + } finally { + if (!success) { + downloadFailed(fileSize); + } + } + } + + /** + * Updates the amount of bytes attempted for download + */ + private void beforeDownload(long fileSize) { + directoryFileTransferTracker.addTransferredBytesStarted(fileSize); + } + + /** + * Updates + * - The amount of bytes that has been successfully downloaded from the source store + * - The last successful download completion timestamp + * - The last successfully downloaded file + * - Download speed (in bytes/sec) + */ + private void afterDownload(long fileSize, long startTimeInMs) { + directoryFileTransferTracker.addTransferredBytesSucceeded(fileSize, startTimeInMs); + } + + /** + * Updates the amount of bytes failed in download + */ + private void downloadFailed(long fileSize) { + directoryFileTransferTracker.addTransferredBytesFailed(fileSize); + } } /** diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponseTests.java index a476b66719d3f..64dfda86c1af9 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponseTests.java @@ -10,10 +10,11 @@ import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.core.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; @@ -23,7 +24,10 @@ import java.util.Map; import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.compareStatsResponse; -import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createPressureTrackerStats; +import static 
org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createShardRouting; +import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createStatsForNewPrimary; +import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createStatsForNewReplica; +import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createStatsForRemoteStoreRestoredPrimary; import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; public class RemoteStoreStatsResponseTests extends OpenSearchTestCase { @@ -43,11 +47,12 @@ public void tearDown() throws Exception { threadPool.shutdownNow(); } - public void testSerialization() throws Exception { - RemoteRefreshSegmentTracker.Stats pressureTrackerStats = createPressureTrackerStats(shardId); - RemoteStoreStats stats = new RemoteStoreStats(pressureTrackerStats); + public void testSerializationForPrimary() throws Exception { + RemoteSegmentTransferTracker.Stats mockPrimaryTrackerStats = createStatsForNewPrimary(shardId); + ShardRouting primaryShardRouting = createShardRouting(shardId, true); + RemoteStoreStats primaryShardStats = new RemoteStoreStats(mockPrimaryTrackerStats, primaryShardRouting); RemoteStoreStatsResponse statsResponse = new RemoteStoreStatsResponse( - new RemoteStoreStats[] { stats }, + new RemoteStoreStats[] { primaryShardStats }, 1, 1, 0, @@ -58,15 +63,96 @@ public void testSerialization() throws Exception { statsResponse.toXContent(builder, EMPTY_PARAMS); Map jsonResponseObject = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()) .v2(); + Map metadataShardsObject = (Map) jsonResponseObject.get("_shards"); + assertEquals(metadataShardsObject.get("total"), 1); + assertEquals(metadataShardsObject.get("successful"), 1); + assertEquals(metadataShardsObject.get("failed"), 0); + Map indicesObject = (Map) jsonResponseObject.get("indices"); + assertTrue(indicesObject.containsKey("index")); + Map shardsObject = (Map) ((Map) indicesObject.get("index")).get("shards"); + ArrayList> perShardNumberObject = (ArrayList>) shardsObject.get("0"); + assertEquals(perShardNumberObject.size(), 1); + Map perShardCopyObject = perShardNumberObject.get(0); + compareStatsResponse(perShardCopyObject, mockPrimaryTrackerStats, primaryShardRouting); + } + + public void testSerializationForBothPrimaryAndReplica() throws Exception { + RemoteSegmentTransferTracker.Stats mockPrimaryTrackerStats = createStatsForNewPrimary(shardId); + RemoteSegmentTransferTracker.Stats mockReplicaTrackerStats = createStatsForNewReplica(shardId); + ShardRouting primaryShardRouting = createShardRouting(shardId, true); + ShardRouting replicaShardRouting = createShardRouting(shardId, false); + RemoteStoreStats primaryShardStats = new RemoteStoreStats(mockPrimaryTrackerStats, primaryShardRouting); + RemoteStoreStats replicaShardStats = new RemoteStoreStats(mockReplicaTrackerStats, replicaShardRouting); + RemoteStoreStatsResponse statsResponse = new RemoteStoreStatsResponse( + new RemoteStoreStats[] { primaryShardStats, replicaShardStats }, + 2, + 2, + 0, + new ArrayList() + ); + + XContentBuilder builder = XContentFactory.jsonBuilder(); + statsResponse.toXContent(builder, EMPTY_PARAMS); + Map jsonResponseObject = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()) + .v2(); + Map metadataShardsObject = (Map) jsonResponseObject.get("_shards"); + assertEquals(2, metadataShardsObject.get("total")); + 
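// The assertions below first verify the broadcast "_shards" metadata and then walk the grouped layout
// produced by groupByIndexAndShards(): "indices" -> index name -> "shards" -> shard id -> one stats
// object per shard copy (primary and replica).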
assertEquals(2, metadataShardsObject.get("successful")); + assertEquals(0, metadataShardsObject.get("failed")); + Map indicesObject = (Map) jsonResponseObject.get("indices"); + assertTrue(indicesObject.containsKey("index")); + Map shardsObject = (Map) ((Map) indicesObject.get("index")).get("shards"); + ArrayList> perShardNumberObject = (ArrayList>) shardsObject.get("0"); + assertEquals(2, perShardNumberObject.size()); + perShardNumberObject.forEach(shardObject -> { + boolean isPrimary = (boolean) ((Map) shardObject.get(RemoteStoreStats.Fields.ROUTING)).get( + RemoteStoreStats.RoutingFields.PRIMARY + ); + if (isPrimary) { + compareStatsResponse(shardObject, mockPrimaryTrackerStats, primaryShardRouting); + } else { + compareStatsResponse(shardObject, mockReplicaTrackerStats, replicaShardRouting); + } + }); + } - ArrayList> statsObjectArray = (ArrayList>) jsonResponseObject.get("stats"); - assertEquals(statsObjectArray.size(), 1); - Map statsObject = statsObjectArray.get(0); - Map shardsObject = (Map) jsonResponseObject.get("_shards"); + public void testSerializationForBothRemoteStoreRestoredPrimaryAndReplica() throws Exception { + RemoteSegmentTransferTracker.Stats mockPrimaryTrackerStats = createStatsForRemoteStoreRestoredPrimary(shardId); + RemoteSegmentTransferTracker.Stats mockReplicaTrackerStats = createStatsForNewReplica(shardId); + ShardRouting primaryShardRouting = createShardRouting(shardId, true); + ShardRouting replicaShardRouting = createShardRouting(shardId, false); + RemoteStoreStats primaryShardStats = new RemoteStoreStats(mockPrimaryTrackerStats, primaryShardRouting); + RemoteStoreStats replicaShardStats = new RemoteStoreStats(mockReplicaTrackerStats, replicaShardRouting); + RemoteStoreStatsResponse statsResponse = new RemoteStoreStatsResponse( + new RemoteStoreStats[] { primaryShardStats, replicaShardStats }, + 2, + 2, + 0, + new ArrayList() + ); - assertEquals(shardsObject.get("total"), 1); - assertEquals(shardsObject.get("successful"), 1); - assertEquals(shardsObject.get("failed"), 0); - compareStatsResponse(statsObject, pressureTrackerStats); + XContentBuilder builder = XContentFactory.jsonBuilder(); + statsResponse.toXContent(builder, EMPTY_PARAMS); + Map jsonResponseObject = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()) + .v2(); + Map metadataShardsObject = (Map) jsonResponseObject.get("_shards"); + assertEquals(2, metadataShardsObject.get("total")); + assertEquals(2, metadataShardsObject.get("successful")); + assertEquals(0, metadataShardsObject.get("failed")); + Map indicesObject = (Map) jsonResponseObject.get("indices"); + assertTrue(indicesObject.containsKey("index")); + Map shardsObject = (Map) ((Map) indicesObject.get("index")).get("shards"); + ArrayList> perShardNumberObject = (ArrayList>) shardsObject.get("0"); + assertEquals(2, perShardNumberObject.size()); + perShardNumberObject.forEach(shardObject -> { + boolean isPrimary = (boolean) ((Map) shardObject.get(RemoteStoreStats.Fields.ROUTING)).get( + RemoteStoreStats.RoutingFields.PRIMARY + ); + if (isPrimary) { + compareStatsResponse(shardObject, mockPrimaryTrackerStats, primaryShardRouting); + } else { + compareStatsResponse(shardObject, mockReplicaTrackerStats, replicaShardRouting); + } + }); } } diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTestHelper.java b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTestHelper.java index 747dc692b1d5d..0c081ee238e2d 100644 --- 
a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTestHelper.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTestHelper.java @@ -8,80 +8,264 @@ package org.opensearch.action.admin.cluster.remotestore.stats; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.cluster.routing.ShardRoutingState; +import org.opensearch.cluster.routing.TestShardRouting; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.store.DirectoryFileTransferTracker; import java.util.Map; +import static org.junit.Assert.assertTrue; import static org.opensearch.test.OpenSearchTestCase.assertEquals; +import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLength; /** * Helper utilities for Remote Store stats tests */ public class RemoteStoreStatsTestHelper { - static RemoteRefreshSegmentTracker.Stats createPressureTrackerStats(ShardId shardId) { - return new RemoteRefreshSegmentTracker.Stats(shardId, 101, 102, 100, 3, 2, 10, 5, 5, 10, 5, 5, 3, 2, 5, 2, 3, 4, 9); + static RemoteSegmentTransferTracker.Stats createStatsForNewPrimary(ShardId shardId) { + return new RemoteSegmentTransferTracker.Stats( + shardId, + 101, + 102, + 100, + 0, + 10, + 2, + 10, + 5, + 5, + 0, + 0, + 0, + 5, + 5, + 5, + 0, + 0, + 0, + createZeroDirectoryFileTransferStats() + ); } - static void compareStatsResponse(Map statsObject, RemoteRefreshSegmentTracker.Stats pressureTrackerStats) { - assertEquals(statsObject.get(RemoteStoreStats.Fields.SHARD_ID), pressureTrackerStats.shardId.toString()); - assertEquals(statsObject.get(RemoteStoreStats.Fields.LOCAL_REFRESH_TIMESTAMP), (int) pressureTrackerStats.localRefreshClockTimeMs); - assertEquals( - statsObject.get(RemoteStoreStats.Fields.REMOTE_REFRESH_TIMESTAMP), - (int) pressureTrackerStats.remoteRefreshClockTimeMs - ); - assertEquals(statsObject.get(RemoteStoreStats.Fields.REFRESH_TIME_LAG_IN_MILLIS), (int) pressureTrackerStats.refreshTimeLagMs); - assertEquals( - statsObject.get(RemoteStoreStats.Fields.REFRESH_LAG), - (int) (pressureTrackerStats.localRefreshNumber - pressureTrackerStats.remoteRefreshNumber) + static RemoteSegmentTransferTracker.Stats createStatsForNewReplica(ShardId shardId) { + return new RemoteSegmentTransferTracker.Stats( + shardId, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + createSampleDirectoryFileTransferStats() ); - assertEquals(statsObject.get(RemoteStoreStats.Fields.BYTES_LAG), (int) pressureTrackerStats.bytesLag); + } - assertEquals(statsObject.get(RemoteStoreStats.Fields.BACKPRESSURE_REJECTION_COUNT), (int) pressureTrackerStats.rejectionCount); - assertEquals( - statsObject.get(RemoteStoreStats.Fields.CONSECUTIVE_FAILURE_COUNT), - (int) pressureTrackerStats.consecutiveFailuresCount + static RemoteSegmentTransferTracker.Stats createStatsForRemoteStoreRestoredPrimary(ShardId shardId) { + return new RemoteSegmentTransferTracker.Stats( + shardId, + 50, + 50, + 0, + 50, + 11, + 11, + 10, + 10, + 0, + 10, + 10, + 0, + 10, + 10, + 0, + 0, + 0, + 100, + createSampleDirectoryFileTransferStats() ); + } + static DirectoryFileTransferTracker.Stats createSampleDirectoryFileTransferStats() { + return new DirectoryFileTransferTracker.Stats(10, 0, 10, 12345, 5, 5, 5); + } + + static DirectoryFileTransferTracker.Stats createZeroDirectoryFileTransferStats() { + 
return new DirectoryFileTransferTracker.Stats(0, 0, 0, 0, 0, 0, 0); + } + + static ShardRouting createShardRouting(ShardId shardId, boolean isPrimary) { + return TestShardRouting.newShardRouting(shardId, randomAlphaOfLength(4), isPrimary, ShardRoutingState.STARTED); + } + + static void compareStatsResponse( + Map statsObject, + RemoteSegmentTransferTracker.Stats statsTracker, + ShardRouting routing + ) { assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.TOTAL_UPLOADS_IN_BYTES)).get(RemoteStoreStats.SubFields.STARTED), - (int) pressureTrackerStats.uploadBytesStarted - ); - assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.TOTAL_UPLOADS_IN_BYTES)).get(RemoteStoreStats.SubFields.SUCCEEDED), - (int) pressureTrackerStats.uploadBytesSucceeded - ); - assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.TOTAL_UPLOADS_IN_BYTES)).get(RemoteStoreStats.SubFields.FAILED), - (int) pressureTrackerStats.uploadBytesFailed - ); - assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.REMOTE_REFRESH_SIZE_IN_BYTES)).get(RemoteStoreStats.SubFields.MOVING_AVG), - pressureTrackerStats.uploadBytesMovingAverage - ); - assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.REMOTE_REFRESH_SIZE_IN_BYTES)).get(RemoteStoreStats.SubFields.LAST_SUCCESSFUL), - (int) pressureTrackerStats.lastSuccessfulRemoteRefreshBytes - ); - assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.UPLOAD_LATENCY_IN_BYTES_PER_SEC)).get(RemoteStoreStats.SubFields.MOVING_AVG), - pressureTrackerStats.uploadBytesPerSecMovingAverage - ); - assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.TOTAL_REMOTE_REFRESH)).get(RemoteStoreStats.SubFields.STARTED), - (int) pressureTrackerStats.totalUploadsStarted - ); - assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.TOTAL_REMOTE_REFRESH)).get(RemoteStoreStats.SubFields.SUCCEEDED), - (int) pressureTrackerStats.totalUploadsSucceeded + ((Map) statsObject.get(RemoteStoreStats.Fields.ROUTING)).get(RemoteStoreStats.RoutingFields.NODE_ID), + routing.currentNodeId() ); assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.TOTAL_REMOTE_REFRESH)).get(RemoteStoreStats.SubFields.FAILED), - (int) pressureTrackerStats.totalUploadsFailed + ((Map) statsObject.get(RemoteStoreStats.Fields.ROUTING)).get(RemoteStoreStats.RoutingFields.STATE), + routing.state().toString() ); assertEquals( - ((Map) statsObject.get(RemoteStoreStats.Fields.REMOTE_REFRESH_LATENCY_IN_MILLIS)).get(RemoteStoreStats.SubFields.MOVING_AVG), - pressureTrackerStats.uploadTimeMovingAverage + ((Map) statsObject.get(RemoteStoreStats.Fields.ROUTING)).get(RemoteStoreStats.RoutingFields.PRIMARY), + routing.primary() ); + + Map segment = ((Map) statsObject.get(RemoteStoreStats.Fields.SEGMENT)); + Map segmentDownloads = ((Map) segment.get(RemoteStoreStats.SubFields.DOWNLOAD)); + Map segmentUploads = ((Map) segment.get(RemoteStoreStats.SubFields.UPLOAD)); + + if (statsTracker.directoryFileTransferTrackerStats.transferredBytesStarted != 0) { + assertEquals( + segmentDownloads.get(RemoteStoreStats.DownloadStatsFields.LAST_SYNC_TIMESTAMP), + (int) statsTracker.directoryFileTransferTrackerStats.lastTransferTimestampMs + ); + assertEquals( + ((Map) segmentDownloads.get(RemoteStoreStats.DownloadStatsFields.TOTAL_DOWNLOADS_IN_BYTES)).get( + RemoteStoreStats.SubFields.STARTED + ), + (int) statsTracker.directoryFileTransferTrackerStats.transferredBytesStarted + ); + assertEquals( + ((Map) segmentDownloads.get(RemoteStoreStats.DownloadStatsFields.TOTAL_DOWNLOADS_IN_BYTES)).get( + 
RemoteStoreStats.SubFields.SUCCEEDED + ), + (int) statsTracker.directoryFileTransferTrackerStats.transferredBytesSucceeded + ); + assertEquals( + ((Map) segmentDownloads.get(RemoteStoreStats.DownloadStatsFields.TOTAL_DOWNLOADS_IN_BYTES)).get( + RemoteStoreStats.SubFields.FAILED + ), + (int) statsTracker.directoryFileTransferTrackerStats.transferredBytesFailed + ); + assertEquals( + ((Map) segmentDownloads.get(RemoteStoreStats.DownloadStatsFields.DOWNLOAD_SIZE_IN_BYTES)).get( + RemoteStoreStats.SubFields.LAST_SUCCESSFUL + ), + (int) statsTracker.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes + ); + assertEquals( + ((Map) segmentDownloads.get(RemoteStoreStats.DownloadStatsFields.DOWNLOAD_SIZE_IN_BYTES)).get( + RemoteStoreStats.SubFields.MOVING_AVG + ), + statsTracker.directoryFileTransferTrackerStats.transferredBytesMovingAverage + ); + assertEquals( + ((Map) segmentDownloads.get(RemoteStoreStats.DownloadStatsFields.DOWNLOAD_SPEED_IN_BYTES_PER_SEC)).get( + RemoteStoreStats.SubFields.MOVING_AVG + ), + statsTracker.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage + ); + } else { + assertTrue(segmentDownloads.isEmpty()); + } + + if (statsTracker.totalUploadsStarted != 0) { + assertEquals( + segmentUploads.get(RemoteStoreStats.UploadStatsFields.LOCAL_REFRESH_TIMESTAMP), + (int) statsTracker.localRefreshClockTimeMs + ); + assertEquals( + segmentUploads.get(RemoteStoreStats.UploadStatsFields.REMOTE_REFRESH_TIMESTAMP), + (int) statsTracker.remoteRefreshClockTimeMs + ); + assertEquals( + segmentUploads.get(RemoteStoreStats.UploadStatsFields.REFRESH_TIME_LAG_IN_MILLIS), + (int) statsTracker.refreshTimeLagMs + ); + assertEquals( + segmentUploads.get(RemoteStoreStats.UploadStatsFields.REFRESH_LAG), + (int) (statsTracker.localRefreshNumber - statsTracker.remoteRefreshNumber) + ); + assertEquals(segmentUploads.get(RemoteStoreStats.UploadStatsFields.BYTES_LAG), (int) statsTracker.bytesLag); + + assertEquals( + segmentUploads.get(RemoteStoreStats.UploadStatsFields.BACKPRESSURE_REJECTION_COUNT), + (int) statsTracker.rejectionCount + ); + assertEquals( + segmentUploads.get(RemoteStoreStats.UploadStatsFields.CONSECUTIVE_FAILURE_COUNT), + (int) statsTracker.consecutiveFailuresCount + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.TOTAL_UPLOADS_IN_BYTES)).get( + RemoteStoreStats.SubFields.STARTED + ), + (int) statsTracker.uploadBytesStarted + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.TOTAL_UPLOADS_IN_BYTES)).get( + RemoteStoreStats.SubFields.SUCCEEDED + ), + (int) statsTracker.uploadBytesSucceeded + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.TOTAL_UPLOADS_IN_BYTES)).get( + RemoteStoreStats.SubFields.FAILED + ), + (int) statsTracker.uploadBytesFailed + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.REMOTE_REFRESH_SIZE_IN_BYTES)).get( + RemoteStoreStats.SubFields.MOVING_AVG + ), + statsTracker.uploadBytesMovingAverage + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.REMOTE_REFRESH_SIZE_IN_BYTES)).get( + RemoteStoreStats.SubFields.LAST_SUCCESSFUL + ), + (int) statsTracker.lastSuccessfulRemoteRefreshBytes + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.UPLOAD_LATENCY_IN_BYTES_PER_SEC)).get( + RemoteStoreStats.SubFields.MOVING_AVG + ), + statsTracker.uploadBytesPerSecMovingAverage + ); + assertEquals( + ((Map) 
segmentUploads.get(RemoteStoreStats.UploadStatsFields.TOTAL_SYNCS_TO_REMOTE)).get( + RemoteStoreStats.SubFields.STARTED + ), + (int) statsTracker.totalUploadsStarted + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.TOTAL_SYNCS_TO_REMOTE)).get( + RemoteStoreStats.SubFields.SUCCEEDED + ), + (int) statsTracker.totalUploadsSucceeded + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.TOTAL_SYNCS_TO_REMOTE)).get(RemoteStoreStats.SubFields.FAILED), + (int) statsTracker.totalUploadsFailed + ); + assertEquals( + ((Map) segmentUploads.get(RemoteStoreStats.UploadStatsFields.REMOTE_REFRESH_LATENCY_IN_MILLIS)).get( + RemoteStoreStats.SubFields.MOVING_AVG + ), + statsTracker.uploadTimeMovingAverage + ); + } else { + assertTrue(segmentUploads.isEmpty()); + } } } diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTests.java index fc057b71b15f8..3597a5350e1fb 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsTests.java @@ -9,12 +9,13 @@ package org.opensearch.action.admin.cluster.remotestore.stats; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.core.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; @@ -24,7 +25,10 @@ import java.util.Map; import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.compareStatsResponse; -import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createPressureTrackerStats; +import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createStatsForNewReplica; +import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createShardRouting; +import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createStatsForNewPrimary; +import static org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsTestHelper.createStatsForRemoteStoreRestoredPrimary; import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; public class RemoteStoreStatsTests extends OpenSearchTestCase { @@ -44,43 +48,175 @@ public void tearDown() throws Exception { threadPool.shutdownNow(); } - public void testXContentBuilder() throws IOException { - RemoteRefreshSegmentTracker.Stats pressureTrackerStats = createPressureTrackerStats(shardId); - RemoteStoreStats stats = new RemoteStoreStats(pressureTrackerStats); + public void testXContentBuilderWithPrimaryShard() throws IOException { + RemoteSegmentTransferTracker.Stats uploadStats = createStatsForNewPrimary(shardId); + ShardRouting routing = createShardRouting(shardId, true); + RemoteStoreStats stats = new RemoteStoreStats(uploadStats, 
routing); XContentBuilder builder = XContentFactory.jsonBuilder(); stats.toXContent(builder, EMPTY_PARAMS); Map jsonObject = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); - compareStatsResponse(jsonObject, pressureTrackerStats); + compareStatsResponse(jsonObject, uploadStats, routing); } - public void testSerialization() throws Exception { - RemoteRefreshSegmentTracker.Stats pressureTrackerStats = createPressureTrackerStats(shardId); - RemoteStoreStats stats = new RemoteStoreStats(pressureTrackerStats); + public void testXContentBuilderWithReplicaShard() throws IOException { + RemoteSegmentTransferTracker.Stats downloadStats = createStatsForNewReplica(shardId); + ShardRouting routing = createShardRouting(shardId, false); + RemoteStoreStats stats = new RemoteStoreStats(downloadStats, routing); + + XContentBuilder builder = XContentFactory.jsonBuilder(); + stats.toXContent(builder, EMPTY_PARAMS); + Map jsonObject = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); + compareStatsResponse(jsonObject, downloadStats, routing); + } + + public void testXContentBuilderWithRemoteStoreRestoredShard() throws IOException { + RemoteSegmentTransferTracker.Stats remotestoreRestoredShardStats = createStatsForRemoteStoreRestoredPrimary(shardId); + ShardRouting routing = createShardRouting(shardId, true); + RemoteStoreStats stats = new RemoteStoreStats(remotestoreRestoredShardStats, routing); + + XContentBuilder builder = XContentFactory.jsonBuilder(); + stats.toXContent(builder, EMPTY_PARAMS); + Map jsonObject = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); + compareStatsResponse(jsonObject, remotestoreRestoredShardStats, routing); + } + + public void testSerializationForPrimaryShard() throws Exception { + RemoteSegmentTransferTracker.Stats primaryShardStats = createStatsForNewPrimary(shardId); + RemoteStoreStats stats = new RemoteStoreStats(primaryShardStats, createShardRouting(shardId, true)); + try (BytesStreamOutput out = new BytesStreamOutput()) { + stats.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + RemoteSegmentTransferTracker.Stats deserializedStats = new RemoteStoreStats(in).getStats(); + assertEquals(stats.getStats().refreshTimeLagMs, deserializedStats.refreshTimeLagMs); + assertEquals(stats.getStats().localRefreshNumber, deserializedStats.localRefreshNumber); + assertEquals(stats.getStats().remoteRefreshNumber, deserializedStats.remoteRefreshNumber); + assertEquals(stats.getStats().uploadBytesStarted, deserializedStats.uploadBytesStarted); + assertEquals(stats.getStats().uploadBytesSucceeded, deserializedStats.uploadBytesSucceeded); + assertEquals(stats.getStats().uploadBytesFailed, deserializedStats.uploadBytesFailed); + assertEquals(stats.getStats().totalUploadsStarted, deserializedStats.totalUploadsStarted); + assertEquals(stats.getStats().totalUploadsFailed, deserializedStats.totalUploadsFailed); + assertEquals(stats.getStats().totalUploadsSucceeded, deserializedStats.totalUploadsSucceeded); + assertEquals(stats.getStats().rejectionCount, deserializedStats.rejectionCount); + assertEquals(stats.getStats().consecutiveFailuresCount, deserializedStats.consecutiveFailuresCount); + assertEquals(stats.getStats().uploadBytesMovingAverage, deserializedStats.uploadBytesMovingAverage, 0); + assertEquals(stats.getStats().uploadBytesPerSecMovingAverage, deserializedStats.uploadBytesPerSecMovingAverage, 0); + 
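// Upload-side fields should round-trip unchanged; the directoryFileTransferTrackerStats assertions
// further below expect zeros because createStatsForNewPrimary uses createZeroDirectoryFileTransferStats().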
assertEquals(stats.getStats().uploadTimeMovingAverage, deserializedStats.uploadTimeMovingAverage, 0); + assertEquals(stats.getStats().bytesLag, deserializedStats.bytesLag); + assertEquals(0, deserializedStats.directoryFileTransferTrackerStats.transferredBytesStarted); + assertEquals(0, deserializedStats.directoryFileTransferTrackerStats.transferredBytesFailed); + assertEquals(0, deserializedStats.directoryFileTransferTrackerStats.transferredBytesSucceeded); + assertEquals(0, deserializedStats.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes); + assertEquals(0, deserializedStats.directoryFileTransferTrackerStats.lastTransferTimestampMs); + } + } + } + + public void testSerializationForReplicaShard() throws Exception { + RemoteSegmentTransferTracker.Stats replicaShardStats = createStatsForNewReplica(shardId); + RemoteStoreStats stats = new RemoteStoreStats(replicaShardStats, createShardRouting(shardId, false)); try (BytesStreamOutput out = new BytesStreamOutput()) { stats.writeTo(out); try (StreamInput in = out.bytes().streamInput()) { - RemoteStoreStats deserializedStats = new RemoteStoreStats(in); - assertEquals(deserializedStats.getStats().shardId.toString(), stats.getStats().shardId.toString()); - assertEquals(deserializedStats.getStats().refreshTimeLagMs, stats.getStats().refreshTimeLagMs); - assertEquals(deserializedStats.getStats().localRefreshNumber, stats.getStats().localRefreshNumber); - assertEquals(deserializedStats.getStats().remoteRefreshNumber, stats.getStats().remoteRefreshNumber); - assertEquals(deserializedStats.getStats().uploadBytesStarted, stats.getStats().uploadBytesStarted); - assertEquals(deserializedStats.getStats().uploadBytesSucceeded, stats.getStats().uploadBytesSucceeded); - assertEquals(deserializedStats.getStats().uploadBytesFailed, stats.getStats().uploadBytesFailed); - assertEquals(deserializedStats.getStats().totalUploadsStarted, stats.getStats().totalUploadsStarted); - assertEquals(deserializedStats.getStats().totalUploadsFailed, stats.getStats().totalUploadsFailed); - assertEquals(deserializedStats.getStats().totalUploadsSucceeded, stats.getStats().totalUploadsSucceeded); - assertEquals(deserializedStats.getStats().rejectionCount, stats.getStats().rejectionCount); - assertEquals(deserializedStats.getStats().consecutiveFailuresCount, stats.getStats().consecutiveFailuresCount); - assertEquals(deserializedStats.getStats().uploadBytesMovingAverage, stats.getStats().uploadBytesMovingAverage, 0); + RemoteSegmentTransferTracker.Stats deserializedStats = new RemoteStoreStats(in).getStats(); + assertEquals(0, deserializedStats.refreshTimeLagMs); + assertEquals(0, deserializedStats.localRefreshNumber); + assertEquals(0, deserializedStats.remoteRefreshNumber); + assertEquals(0, deserializedStats.uploadBytesStarted); + assertEquals(0, deserializedStats.uploadBytesSucceeded); + assertEquals(0, deserializedStats.uploadBytesFailed); + assertEquals(0, deserializedStats.totalUploadsStarted); + assertEquals(0, deserializedStats.totalUploadsFailed); + assertEquals(0, deserializedStats.totalUploadsSucceeded); + assertEquals(0, deserializedStats.rejectionCount); + assertEquals(0, deserializedStats.consecutiveFailuresCount); + assertEquals(0, deserializedStats.bytesLag); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesStarted, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesStarted + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesFailed, + 
deserializedStats.directoryFileTransferTrackerStats.transferredBytesFailed + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesSucceeded, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesSucceeded + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes, + deserializedStats.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.lastTransferTimestampMs, + deserializedStats.directoryFileTransferTrackerStats.lastTransferTimestampMs + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage, + 0 + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesMovingAverage, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesMovingAverage, + 0 + ); + } + } + } + + public void testSerializationForRemoteStoreRestoredPrimaryShard() throws Exception { + RemoteSegmentTransferTracker.Stats primaryShardStats = createStatsForRemoteStoreRestoredPrimary(shardId); + RemoteStoreStats stats = new RemoteStoreStats(primaryShardStats, createShardRouting(shardId, true)); + try (BytesStreamOutput out = new BytesStreamOutput()) { + stats.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + RemoteSegmentTransferTracker.Stats deserializedStats = new RemoteStoreStats(in).getStats(); + assertEquals(stats.getStats().refreshTimeLagMs, deserializedStats.refreshTimeLagMs); + assertEquals(stats.getStats().localRefreshNumber, deserializedStats.localRefreshNumber); + assertEquals(stats.getStats().remoteRefreshNumber, deserializedStats.remoteRefreshNumber); + assertEquals(stats.getStats().uploadBytesStarted, deserializedStats.uploadBytesStarted); + assertEquals(stats.getStats().uploadBytesSucceeded, deserializedStats.uploadBytesSucceeded); + assertEquals(stats.getStats().uploadBytesFailed, deserializedStats.uploadBytesFailed); + assertEquals(stats.getStats().totalUploadsStarted, deserializedStats.totalUploadsStarted); + assertEquals(stats.getStats().totalUploadsFailed, deserializedStats.totalUploadsFailed); + assertEquals(stats.getStats().totalUploadsSucceeded, deserializedStats.totalUploadsSucceeded); + assertEquals(stats.getStats().rejectionCount, deserializedStats.rejectionCount); + assertEquals(stats.getStats().consecutiveFailuresCount, deserializedStats.consecutiveFailuresCount); + assertEquals(stats.getStats().uploadBytesMovingAverage, deserializedStats.uploadBytesMovingAverage, 0); + assertEquals(stats.getStats().uploadBytesPerSecMovingAverage, deserializedStats.uploadBytesPerSecMovingAverage, 0); + assertEquals(stats.getStats().uploadTimeMovingAverage, deserializedStats.uploadTimeMovingAverage, 0); + assertEquals(stats.getStats().bytesLag, deserializedStats.bytesLag); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesStarted, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesStarted + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesFailed, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesFailed + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesSucceeded, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesSucceeded + ); + assertEquals( + 
stats.getStats().directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes, + deserializedStats.directoryFileTransferTrackerStats.lastSuccessfulTransferInBytes + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.lastTransferTimestampMs, + deserializedStats.directoryFileTransferTrackerStats.lastTransferTimestampMs + ); + assertEquals( + stats.getStats().directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage, + 0 + ); assertEquals( - deserializedStats.getStats().uploadBytesPerSecMovingAverage, - stats.getStats().uploadBytesPerSecMovingAverage, + stats.getStats().directoryFileTransferTrackerStats.transferredBytesMovingAverage, + deserializedStats.directoryFileTransferTrackerStats.transferredBytesMovingAverage, 0 ); - assertEquals(deserializedStats.getStats().uploadTimeMovingAverage, stats.getStats().uploadTimeMovingAverage, 0); - assertEquals(deserializedStats.getStats().bytesLag, stats.getStats().bytesLag); } } } diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java index 25e44884814a5..375b1b8ed7aba 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java @@ -29,7 +29,7 @@ import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; import org.opensearch.index.remote.RemoteRefreshSegmentPressureService; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.index.shard.IndexShardTestCase; import org.opensearch.indices.IndicesService; import org.opensearch.test.FeatureFlagSetter; @@ -86,7 +86,7 @@ public void setUp() throws Exception { Collections.emptySet() ); - when(pressureService.getRemoteRefreshSegmentTracker(any())).thenReturn(mock(RemoteRefreshSegmentTracker.class)); + when(pressureService.getRemoteRefreshSegmentTracker(any())).thenReturn(mock(RemoteSegmentTransferTracker.class)); when(indicesService.indexService(INDEX)).thenReturn(indexService); when(indexService.getIndexSettings()).thenReturn(new IndexSettings(remoteStoreIndexMetadata, Settings.EMPTY)); statsAction = new TransportRemoteStoreStatsAction( @@ -108,7 +108,7 @@ public void tearDown() throws Exception { clusterService.close(); } - public void testOnlyPrimaryShards() throws Exception { + public void testAllShardCopies() throws Exception { FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); RoutingTable routingTable = RoutingTable.builder().addAsNew(remoteStoreIndexMetadata).build(); Metadata metadata = Metadata.builder().put(remoteStoreIndexMetadata, false).build(); @@ -125,7 +125,7 @@ public void testOnlyPrimaryShards() throws Exception { new String[] { INDEX.getName() } ); - assertEquals(shardsIterator.size(), 2); + assertEquals(shardsIterator.size(), 4); } public void testOnlyLocalShards() throws Exception { @@ -153,10 +153,10 @@ public void testOnlyLocalShards() throws Exception { remoteStoreStatsRequest.local(true); ShardsIterator shardsIterator = statsAction.shards(clusterService.state(), remoteStoreStatsRequest, concreteIndices); - assertEquals(shardsIterator.size(), 1); + 
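The serialization tests above all follow the same Writeable round-trip pattern: write the stats object to a BytesStreamOutput, re-read it from the resulting StreamInput, and compare field by field (for a replica shard the upload-side counters come back as zero while the directory file transfer counters survive the round trip). A minimal sketch of that round trip, using only the stream classes already present in the tests, is shown below; the helper name is illustrative and not taken from the OpenSearch codebase.

    // Sketch only: a generic round-trip helper in the shape the tests above use.
    private RemoteStoreStats roundTrip(RemoteStoreStats stats) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            stats.writeTo(out);                      // serialize as for inter-node transport
            try (StreamInput in = out.bytes().streamInput()) {
                return new RemoteStoreStats(in);     // rebuild the object from the wire format
            }
        }
    }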
assertEquals(shardsIterator.size(), 2); } - public void testOnlyRemoteStoreEnabledShards() throws Exception { + public void testOnlyRemoteStoreEnabledShardCopies() throws Exception { FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); Index NEW_INDEX = new Index("newIndex", "newUUID"); IndexMetadata indexMetadataWithoutRemoteStore = IndexMetadata.builder(NEW_INDEX.getName()) @@ -189,6 +189,6 @@ public void testOnlyRemoteStoreEnabledShards() throws Exception { new String[] { INDEX.getName() } ); - assertEquals(shardsIterator.size(), 2); + assertEquals(shardsIterator.size(), 4); } } diff --git a/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java b/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java index 5ccacd4048596..7b180a71f3bab 100644 --- a/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java +++ b/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java @@ -16,6 +16,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.IndexShard; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.store.Store; import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; @@ -99,7 +100,7 @@ public void testValidateSegmentUploadLag() { pressureService = new RemoteRefreshSegmentPressureService(clusterService, Settings.EMPTY); pressureService.afterIndexShardCreated(indexShard); - RemoteRefreshSegmentTracker pressureTracker = pressureService.getRemoteRefreshSegmentTracker(shardId); + RemoteSegmentTransferTracker pressureTracker = pressureService.getRemoteRefreshSegmentTracker(shardId); pressureTracker.updateLocalRefreshSeqNo(6); // 1. 
time lag more than dynamic threshold @@ -152,10 +153,11 @@ public void testValidateSegmentUploadLag() { private static IndexShard createIndexShard(ShardId shardId, boolean remoteStoreEnabled) { Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, String.valueOf(remoteStoreEnabled)).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test_index", settings); + Store store = mock(Store.class); IndexShard indexShard = mock(IndexShard.class); when(indexShard.indexSettings()).thenReturn(indexSettings); when(indexShard.shardId()).thenReturn(shardId); + when(indexShard.store()).thenReturn(store); return indexShard; } - } diff --git a/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentTrackerTests.java b/server/src/test/java/org/opensearch/index/remote/RemoteSegmentTransferTrackerTests.java similarity index 66% rename from server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentTrackerTests.java rename to server/src/test/java/org/opensearch/index/remote/RemoteSegmentTransferTrackerTests.java index badfeb0d67c05..208cea111f2e1 100644 --- a/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentTrackerTests.java +++ b/server/src/test/java/org/opensearch/index/remote/RemoteSegmentTransferTrackerTests.java @@ -14,6 +14,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.store.DirectoryFileTransferTracker; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; @@ -24,7 +25,7 @@ import static org.mockito.Mockito.mock; -public class RemoteRefreshSegmentTrackerTests extends OpenSearchTestCase { +public class RemoteSegmentTransferTrackerTests extends OpenSearchTestCase { private RemoteRefreshSegmentPressureSettings pressureSettings; @@ -34,7 +35,9 @@ public class RemoteRefreshSegmentTrackerTests extends OpenSearchTestCase { private ShardId shardId; - private RemoteRefreshSegmentTracker pressureTracker; + private RemoteSegmentTransferTracker pressureTracker; + + private DirectoryFileTransferTracker directoryFileTransferTracker; @Override public void setUp() throws Exception { @@ -51,6 +54,7 @@ public void setUp() throws Exception { mock(RemoteRefreshSegmentPressureService.class) ); shardId = new ShardId("index", "uuid", 0); + directoryFileTransferTracker = new DirectoryFileTransferTracker(); } @Override @@ -60,8 +64,9 @@ public void tearDown() throws Exception { } public void testGetShardId() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -70,8 +75,9 @@ public void testGetShardId() { } public void testUpdateLocalRefreshSeqNo() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -82,8 +88,9 @@ public void testUpdateLocalRefreshSeqNo() { } public void testUpdateRemoteRefreshSeqNo() { - pressureTracker = new RemoteRefreshSegmentTracker( + 
pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -94,8 +101,9 @@ public void testUpdateRemoteRefreshSeqNo() { } public void testUpdateLocalRefreshTimeMs() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -106,8 +114,9 @@ public void testUpdateLocalRefreshTimeMs() { } public void testUpdateRemoteRefreshTimeMs() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -117,9 +126,23 @@ public void testUpdateRemoteRefreshTimeMs() { assertEquals(refreshTimeMs, pressureTracker.getRemoteRefreshTimeMs()); } + public void testLastDownloadTimestampMs() { + pressureTracker = new RemoteSegmentTransferTracker( + shardId, + directoryFileTransferTracker, + pressureSettings.getUploadBytesMovingAverageWindowSize(), + pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), + pressureSettings.getUploadTimeMovingAverageWindowSize() + ); + long currentTimeInMs = System.currentTimeMillis(); + pressureTracker.getDirectoryFileTransferTracker().updateLastTransferTimestampMs(currentTimeInMs); + assertEquals(currentTimeInMs, pressureTracker.getDirectoryFileTransferTracker().getLastTransferTimestampMs()); + } + public void testComputeSeqNoLagOnUpdate() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -133,8 +156,9 @@ public void testComputeSeqNoLagOnUpdate() { } public void testComputeTimeLagOnUpdate() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -150,8 +174,9 @@ public void testComputeTimeLagOnUpdate() { } public void testAddUploadBytesStarted() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -165,8 +190,9 @@ public void testAddUploadBytesStarted() { } public void testAddUploadBytesFailed() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), 
pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -180,8 +206,9 @@ public void testAddUploadBytesFailed() { } public void testAddUploadBytesSucceeded() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -194,9 +221,58 @@ public void testAddUploadBytesSucceeded() { assertEquals(bytesToAdd + moreBytesToAdd, pressureTracker.getUploadBytesSucceeded()); } + public void testAddDownloadBytesStarted() { + pressureTracker = new RemoteSegmentTransferTracker( + shardId, + directoryFileTransferTracker, + pressureSettings.getUploadBytesMovingAverageWindowSize(), + pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), + pressureSettings.getUploadTimeMovingAverageWindowSize() + ); + long bytesToAdd = randomLongBetween(1000, 1000000); + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesStarted(bytesToAdd); + assertEquals(bytesToAdd, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesStarted()); + long moreBytesToAdd = randomLongBetween(1000, 10000); + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesStarted(moreBytesToAdd); + assertEquals(bytesToAdd + moreBytesToAdd, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesStarted()); + } + + public void testAddDownloadBytesFailed() { + pressureTracker = new RemoteSegmentTransferTracker( + shardId, + directoryFileTransferTracker, + pressureSettings.getUploadBytesMovingAverageWindowSize(), + pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), + pressureSettings.getUploadTimeMovingAverageWindowSize() + ); + long bytesToAdd = randomLongBetween(1000, 1000000); + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesFailed(bytesToAdd); + assertEquals(bytesToAdd, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesFailed()); + long moreBytesToAdd = randomLongBetween(1000, 10000); + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesFailed(moreBytesToAdd); + assertEquals(bytesToAdd + moreBytesToAdd, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesFailed()); + } + + public void testAddDownloadBytesSucceeded() { + pressureTracker = new RemoteSegmentTransferTracker( + shardId, + directoryFileTransferTracker, + pressureSettings.getUploadBytesMovingAverageWindowSize(), + pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), + pressureSettings.getUploadTimeMovingAverageWindowSize() + ); + long bytesToAdd = randomLongBetween(1000, 1000000); + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesSucceeded(bytesToAdd, System.currentTimeMillis()); + assertEquals(bytesToAdd, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesSucceeded()); + long moreBytesToAdd = randomLongBetween(1000, 10000); + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesSucceeded(moreBytesToAdd, System.currentTimeMillis()); + assertEquals(bytesToAdd + moreBytesToAdd, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesSucceeded()); + } + public void testGetInflightUploadBytes() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, 
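The three new download tests here mirror the existing upload tests, but the byte accounting is delegated to the tracker returned by getDirectoryFileTransferTracker() rather than kept on RemoteSegmentTransferTracker itself, and the succeeded path additionally records a transfer timestamp. A rough shape of that accounting, as a hypothetical stand-in class rather than the real DirectoryFileTransferTracker, is:

    // Sketch only: the real tracker (org.opensearch.index.store.DirectoryFileTransferTracker)
    // carries more state, e.g. the moving averages exercised further below.
    class TransferAccounting {
        private long bytesStarted;
        private long bytesFailed;
        private long bytesSucceeded;
        private long lastTransferTimestampMs;

        void addStarted(long bytes) { bytesStarted += bytes; }   // bumped when a transfer begins
        void addFailed(long bytes) { bytesFailed += bytes; }     // bumped when a transfer fails
        void addSucceeded(long bytes, long timestampMs) {        // successes also record "when"
            bytesSucceeded += bytes;
            lastTransferTimestampMs = timestampMs;
        }

        long started() { return bytesStarted; }
        long failed() { return bytesFailed; }
        long succeeded() { return bytesSucceeded; }
        long lastTransferTimestampMs() { return lastTransferTimestampMs; }
    }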
pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -211,8 +287,9 @@ public void testGetInflightUploadBytes() { } public void testIncrementTotalUploadsStarted() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -224,8 +301,9 @@ public void testIncrementTotalUploadsStarted() { } public void testIncrementTotalUploadsFailed() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -237,8 +315,9 @@ public void testIncrementTotalUploadsFailed() { } public void testIncrementTotalUploadSucceeded() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -250,8 +329,9 @@ public void testIncrementTotalUploadSucceeded() { } public void testGetInflightUploads() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -267,8 +347,9 @@ public void testGetInflightUploads() { } public void testIncrementRejectionCount() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -280,8 +361,9 @@ public void testIncrementRejectionCount() { } public void testGetConsecutiveFailureCount() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -295,8 +377,9 @@ public void testGetConsecutiveFailureCount() { } public void testComputeBytesLag() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -324,8 +407,9 @@ public void testComputeBytesLag() { } public void testIsUploadBytesAverageReady() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, 
pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -351,8 +435,9 @@ public void testIsUploadBytesAverageReady() { } public void testIsUploadBytesPerSecAverageReady() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -378,8 +463,9 @@ public void testIsUploadBytesPerSecAverageReady() { } public void testIsUploadTimeMsAverageReady() { - pressureTracker = new RemoteRefreshSegmentTracker( + pressureTracker = new RemoteSegmentTransferTracker( shardId, + directoryFileTransferTracker, pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -404,12 +490,68 @@ public void testIsUploadTimeMsAverageReady() { assertEquals((double) sum / 20, pressureTracker.getUploadTimeMsAverage(), 0.0d); } + public void testIsDownloadBytesAverageReady() { + pressureTracker = new RemoteSegmentTransferTracker( + shardId, + directoryFileTransferTracker, + pressureSettings.getUploadBytesMovingAverageWindowSize(), + pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), + pressureSettings.getUploadTimeMovingAverageWindowSize() + ); + assertFalse(pressureTracker.getDirectoryFileTransferTracker().isTransferredBytesAverageReady()); + + long sum = 0; + for (int i = 1; i < 20; i++) { + pressureTracker.getDirectoryFileTransferTracker().updateLastSuccessfulTransferSize(i); + sum += i; + assertFalse(pressureTracker.getDirectoryFileTransferTracker().isTransferredBytesAverageReady()); + assertEquals((double) sum / i, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesAverage(), 0.0d); + } + + pressureTracker.getDirectoryFileTransferTracker().updateLastSuccessfulTransferSize(20); + sum += 20; + assertTrue(pressureTracker.getDirectoryFileTransferTracker().isTransferredBytesAverageReady()); + assertEquals((double) sum / 20, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesAverage(), 0.0d); + + pressureTracker.getDirectoryFileTransferTracker().updateLastSuccessfulTransferSize(100); + sum = sum + 100 - 1; + assertEquals((double) sum / 20, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesAverage(), 0.0d); + } + + public void testIsDownloadBytesPerSecAverageReady() { + pressureTracker = new RemoteSegmentTransferTracker( + shardId, + directoryFileTransferTracker, + pressureSettings.getUploadBytesMovingAverageWindowSize(), + pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), + pressureSettings.getUploadTimeMovingAverageWindowSize() + ); + assertFalse(pressureTracker.getDirectoryFileTransferTracker().isTransferredBytesPerSecAverageReady()); + + long sum = 0; + for (int i = 1; i < 20; i++) { + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesPerSec(i); + sum += i; + assertFalse(pressureTracker.getDirectoryFileTransferTracker().isTransferredBytesPerSecAverageReady()); + assertEquals((double) sum / i, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesPerSecAverage(), 0.0d); + } + + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesPerSec(20); + sum += 20; + 
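The readiness and eviction arithmetic in these download-average tests implies a fixed moving-average window of 20 samples: the isTransferredBytes[PerSec]AverageReady() flag only turns true once the 20th sample arrives, and adding a 21st value (100) evicts the oldest sample (1), which is why the expected sum is updated as sum = sum + 100 - 1. A from-scratch sketch of that windowing, assuming the window size of 20 implied by the assertions and not representing OpenSearch's own moving-average class, is:

    // Sketch only: fixed-size moving window with eviction of the oldest sample.
    import java.util.ArrayDeque;

    class WindowAverage {
        private final int windowSize;
        private final ArrayDeque<Long> window = new ArrayDeque<>();
        private long sum;

        WindowAverage(int windowSize) {
            this.windowSize = windowSize;
        }

        void record(long value) {
            window.addLast(value);
            sum += value;
            if (window.size() > windowSize) {
                sum -= window.removeFirst();   // evict the oldest sample, e.g. sum = sum + 100 - 1
            }
        }

        boolean ready() {                      // analogous to isTransferredBytesAverageReady()
            return window.size() >= windowSize;
        }

        double average() {
            return window.isEmpty() ? 0.0 : (double) sum / window.size();
        }
    }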
assertTrue(pressureTracker.getDirectoryFileTransferTracker().isTransferredBytesPerSecAverageReady()); + assertEquals((double) sum / 20, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesPerSecAverage(), 0.0d); + + pressureTracker.getDirectoryFileTransferTracker().addTransferredBytesPerSec(100); + sum = sum + 100 - 1; + assertEquals((double) sum / 20, pressureTracker.getDirectoryFileTransferTracker().getTransferredBytesPerSecAverage(), 0.0d); + } + /** - * Tests whether RemoteRefreshSegmentTracker.Stats object generated correctly from RemoteRefreshSegmentTracker. + * Tests whether RemoteSegmentTransferTracker.Stats object generated correctly from RemoteSegmentTransferTracker. * */ public void testStatsObjectCreation() { pressureTracker = constructTracker(); - RemoteRefreshSegmentTracker.Stats pressureTrackerStats = pressureTracker.stats(); + RemoteSegmentTransferTracker.Stats pressureTrackerStats = pressureTracker.stats(); assertEquals(pressureTracker.getShardId(), pressureTrackerStats.shardId); assertEquals(pressureTracker.getTimeMsLag(), (int) pressureTrackerStats.refreshTimeLagMs); assertEquals(pressureTracker.getLocalRefreshSeqNo(), (int) pressureTrackerStats.localRefreshNumber); @@ -429,16 +571,16 @@ public void testStatsObjectCreation() { } /** - * Tests whether RemoteRefreshSegmentTracker.Stats object serialize and deserialize is working fine. + * Tests whether RemoteSegmentTransferTracker.Stats object serialize and deserialize is working fine. * This comes into play during internode data transfer. * */ public void testStatsObjectCreationViaStream() throws IOException { pressureTracker = constructTracker(); - RemoteRefreshSegmentTracker.Stats pressureTrackerStats = pressureTracker.stats(); + RemoteSegmentTransferTracker.Stats pressureTrackerStats = pressureTracker.stats(); try (BytesStreamOutput out = new BytesStreamOutput()) { pressureTrackerStats.writeTo(out); try (StreamInput in = out.bytes().streamInput()) { - RemoteRefreshSegmentTracker.Stats deserializedStats = new RemoteRefreshSegmentTracker.Stats(in); + RemoteSegmentTransferTracker.Stats deserializedStats = new RemoteSegmentTransferTracker.Stats(in); assertEquals(deserializedStats.shardId, pressureTrackerStats.shardId); assertEquals((int) deserializedStats.refreshTimeLagMs, (int) pressureTrackerStats.refreshTimeLagMs); assertEquals((int) deserializedStats.localRefreshNumber, (int) pressureTrackerStats.localRefreshNumber); @@ -459,13 +601,26 @@ public void testStatsObjectCreationViaStream() throws IOException { assertEquals((int) deserializedStats.totalUploadsStarted, (int) pressureTrackerStats.totalUploadsStarted); assertEquals((int) deserializedStats.totalUploadsSucceeded, (int) pressureTrackerStats.totalUploadsSucceeded); assertEquals((int) deserializedStats.totalUploadsFailed, (int) pressureTrackerStats.totalUploadsFailed); + assertEquals( + (int) deserializedStats.directoryFileTransferTrackerStats.transferredBytesStarted, + (int) pressureTrackerStats.directoryFileTransferTrackerStats.transferredBytesStarted + ); + assertEquals( + (int) deserializedStats.directoryFileTransferTrackerStats.transferredBytesSucceeded, + (int) pressureTrackerStats.directoryFileTransferTrackerStats.transferredBytesSucceeded + ); + assertEquals( + (int) deserializedStats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage, + (int) pressureTrackerStats.directoryFileTransferTrackerStats.transferredBytesPerSecMovingAverage + ); } } } - private RemoteRefreshSegmentTracker constructTracker() { - 
RemoteRefreshSegmentTracker segmentPressureTracker = new RemoteRefreshSegmentTracker( + private RemoteSegmentTransferTracker constructTracker() { + RemoteSegmentTransferTracker segmentPressureTracker = new RemoteSegmentTransferTracker( shardId, + new DirectoryFileTransferTracker(), pressureSettings.getUploadBytesMovingAverageWindowSize(), pressureSettings.getUploadBytesPerSecMovingAverageWindowSize(), pressureSettings.getUploadTimeMovingAverageWindowSize() @@ -475,6 +630,9 @@ private RemoteRefreshSegmentTracker constructTracker() { segmentPressureTracker.addUploadBytes(99); segmentPressureTracker.updateRemoteRefreshTimeMs(System.nanoTime() / 1_000_000L + randomIntBetween(10, 100)); segmentPressureTracker.incrementRejectionCount(); + segmentPressureTracker.getDirectoryFileTransferTracker().addTransferredBytesStarted(10); + segmentPressureTracker.getDirectoryFileTransferTracker().addTransferredBytesSucceeded(10, System.currentTimeMillis()); + segmentPressureTracker.getDirectoryFileTransferTracker().addTransferredBytesPerSec(5); return segmentPressureTracker; } } diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java index 7c119bfbbc573..f13f89c6e067c 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java @@ -26,7 +26,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.index.engine.InternalEngineFactory; import org.opensearch.index.remote.RemoteRefreshSegmentPressureService; -import org.opensearch.index.remote.RemoteRefreshSegmentTracker; +import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.index.store.RemoteSegmentStoreDirectory; import org.opensearch.index.store.Store; import org.opensearch.indices.replication.checkpoint.SegmentReplicationCheckpointPublisher; @@ -255,7 +255,7 @@ public void testRefreshSuccessOnFirstAttempt() throws Exception { assertBusy(() -> assertEquals(0, refreshCountLatch.getCount())); assertBusy(() -> assertEquals(0, successLatch.getCount())); RemoteRefreshSegmentPressureService pressureService = tuple.v2(); - RemoteRefreshSegmentTracker segmentTracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); + RemoteSegmentTransferTracker segmentTracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); assertNoLagAndTotalUploadsFailed(segmentTracker, 0); } @@ -276,7 +276,7 @@ public void testRefreshSuccessOnSecondAttempt() throws Exception { assertBusy(() -> assertEquals(0, refreshCountLatch.getCount())); assertBusy(() -> assertEquals(0, successLatch.getCount())); RemoteRefreshSegmentPressureService pressureService = tuple.v2(); - RemoteRefreshSegmentTracker segmentTracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); + RemoteSegmentTransferTracker segmentTracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); assertNoLagAndTotalUploadsFailed(segmentTracker, 1); } @@ -322,11 +322,11 @@ public void testRefreshSuccessOnThirdAttempt() throws Exception { assertBusy(() -> assertEquals(0, refreshCountLatch.getCount())); assertBusy(() -> assertEquals(0, successLatch.getCount())); RemoteRefreshSegmentPressureService pressureService = tuple.v2(); - RemoteRefreshSegmentTracker segmentTracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); + 
RemoteSegmentTransferTracker segmentTracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); assertNoLagAndTotalUploadsFailed(segmentTracker, 2); } - private void assertNoLagAndTotalUploadsFailed(RemoteRefreshSegmentTracker segmentTracker, long totalUploadsFailed) throws Exception { + private void assertNoLagAndTotalUploadsFailed(RemoteSegmentTransferTracker segmentTracker, long totalUploadsFailed) throws Exception { assertBusy(() -> { assertEquals(0, segmentTracker.getBytesLag()); assertEquals(0, segmentTracker.getRefreshSeqNoLag()); @@ -339,7 +339,7 @@ public void testTrackerData() throws Exception { Tuple tuple = mockIndexShardWithRetryAndScheduleRefresh(1); RemoteStoreRefreshListener listener = tuple.v1(); RemoteRefreshSegmentPressureService pressureService = tuple.v2(); - RemoteRefreshSegmentTracker tracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); + RemoteSegmentTransferTracker tracker = pressureService.getRemoteRefreshSegmentTracker(indexShard.shardId()); assertNoLag(tracker); indexDocs(100, randomIntBetween(100, 200)); indexShard.refresh("test"); @@ -347,7 +347,7 @@ public void testTrackerData() throws Exception { assertBusy(() -> assertNoLag(tracker)); } - private void assertNoLag(RemoteRefreshSegmentTracker tracker) { + private void assertNoLag(RemoteSegmentTransferTracker tracker) { assertEquals(0, tracker.getRefreshSeqNoLag()); assertEquals(0, tracker.getBytesLag()); assertEquals(0, tracker.getTimeMsLag()); From 66ea4b3d736d676880b776039e0b901707cf7e7c Mon Sep 17 00:00:00 2001 From: Varun Bansal Date: Tue, 1 Aug 2023 18:54:46 +0530 Subject: [PATCH 35/75] Restrict user overrides for remote store related index settings (#8812) Signed-off-by: Varun Bansal --- CHANGELOG.md | 1 + .../index/SegmentReplicationPressureIT.java | 4 +- ...emoteStoreMockRepositoryIntegTestCase.java | 6 +- ...ateRemoteIndexClusterDefaultDocRepIT.java} | 16 +- .../remotestore/CreateRemoteIndexIT.java | 149 +++-- .../remotestore/PrimaryTermValidationIT.java | 1 + .../remotestore/RemoteIndexRecoveryIT.java | 99 +++- .../remotestore/RemoteRestoreSnapshotIT.java | 530 ++++++++++++++++++ .../RemoteStoreBaseIntegTestCase.java | 46 +- .../ReplicaToPrimaryPromotionIT.java | 6 +- .../SegmentReplicationUsingRemoteStoreIT.java | 15 +- ...tReplicationWithRemoteStorePressureIT.java | 18 +- .../RemoteStoreMultipartFileCorruptionIT.java | 52 +- .../opensearch/snapshots/CloneSnapshotIT.java | 27 +- .../snapshots/DeleteSnapshotIT.java | 64 +-- .../RemoteIndexSnapshotStatusApiIT.java | 209 +++++++ .../snapshots/RestoreSnapshotIT.java | 505 ----------------- .../snapshots/SnapshotStatusApisIT.java | 99 +--- .../cluster/metadata/IndexMetadata.java | 44 +- .../metadata/MetadataCreateIndexService.java | 56 +- .../common/settings/ClusterSettings.java | 2 +- .../common/settings/IndexScopedSettings.java | 2 +- .../opensearch/indices/IndicesService.java | 2 +- .../TransportRemoteStoreStatsActionTests.java | 3 + .../MetadataCreateIndexServiceTests.java | 237 +++----- .../opensearch/index/IndexSettingsTests.java | 41 +- ...oteRefreshSegmentPressureServiceTests.java | 6 +- .../index/seqno/ReplicationTrackerTests.java | 55 +- .../index/shard/IndexShardTests.java | 4 +- .../index/translog/RemoteFSTranslogTests.java | 2 + .../BlobStoreRepositoryHelperTests.java | 140 +++++ .../BlobStoreRepositoryRemoteIndexTests.java | 371 ++++++++++++ .../blobstore/BlobStoreRepositoryTests.java | 376 +------------ .../AbstractSnapshotIntegTestCase.java | 5 +- 34 files changed, 1731 insertions(+), 1462 
deletions(-) rename server/src/internalClusterTest/java/org/opensearch/remotestore/{CreateRemoteIndexClusterDefaultDocRep.java => CreateRemoteIndexClusterDefaultDocRepIT.java} (82%) create mode 100644 server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java create mode 100644 server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java create mode 100644 server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryHelperTests.java create mode 100644 server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e4a26caa2b10..cf101317bf81d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -48,6 +48,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Remote Segment Store Repository setting moved from `index.remote_store.repository` to `index.remote_store.segment.repository` and `cluster.remote_store.repository` to `cluster.remote_store.segment.repository` respectively for Index and Cluster level settings ([#8719](https://github.com/opensearch-project/OpenSearch/pull/8719)) - [Remote Store] Add support to restore only unassigned shards of an index ([#8792](https://github.com/opensearch-project/OpenSearch/pull/8792)) - Replace the deprecated IndexReader APIs with new storedFields() & termVectors() ([#7792](https://github.com/opensearch-project/OpenSearch/pull/7792)) +- [Remote Store] Restrict user override for remote store index level settings ([#8812](https://github.com/opensearch-project/OpenSearch/pull/8812)) ### Deprecated diff --git a/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java b/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java index 60ff82e617dbd..cf73c370cce8f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/SegmentReplicationPressureIT.java @@ -15,6 +15,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.lease.Releasable; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.IndexShardState; @@ -30,6 +31,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -260,7 +262,7 @@ public void testFailStaleReplica() throws Exception { public void testWithDocumentReplicationEnabledIndex() throws Exception { assumeTrue( "Can't create DocRep index with remote store enabled. Skipping.", - indexSettings().getAsBoolean(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, false) == false + Objects.equals(featureFlagSettings().get(FeatureFlags.REMOTE_STORE, "false"), "false") ); Settings settings = Settings.builder().put(MAX_REPLICATION_TIME_SETTING.getKey(), TimeValue.timeValueMillis(500)).build(); // Starts a primary and replica node. 
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java index 709c027c3f347..5bfbbc11da77d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/AbstractRemoteStoreMockRepositoryIntegTestCase.java @@ -29,6 +29,7 @@ import java.util.Set; import java.util.stream.Collectors; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; public abstract class AbstractRemoteStoreMockRepositoryIntegTestCase extends AbstractSnapshotIntegTestCase { @@ -46,7 +47,7 @@ protected Settings featureFlagSettings() { public void setup() { FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); FeatureFlagSetter.set(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL); - internalCluster().startClusterManagerOnlyNode(); + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REPOSITORY_NAME, TRANSLOG_REPOSITORY_NAME)); } @Override @@ -62,9 +63,6 @@ protected Settings remoteStoreIndexSettings(int numberOfReplicas) { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numberOfReplicas) .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, TRANSLOG_REPOSITORY_NAME) .build(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRep.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRepIT.java similarity index 82% rename from server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRep.java rename to server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRepIT.java index 2abf4fc50ec69..32c02332e05b2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRep.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexClusterDefaultDocRepIT.java @@ -16,12 +16,16 @@ import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.OpenSearchIntegTestCase; +import java.util.Locale; + import static org.hamcrest.Matchers.containsString; +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED; +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) -public class CreateRemoteIndexClusterDefaultDocRep extends CreateRemoteIndexIT { +public class CreateRemoteIndexClusterDefaultDocRepIT extends CreateRemoteIndexIT { @Override protected Settings nodeSettings(int nodeOriginal) { @@ -44,7 +48,15 @@ public void testDefaultRemoteStoreNoUserOverride() throws Exception { ); assertThat( exc.getMessage(), - 
containsString("Cannot enable [index.remote_store.enabled] when [index.replication.type] is DOCUMENT") + containsString( + String.format( + Locale.ROOT, + "To enable %s, %s should be set to %s", + SETTING_REMOTE_STORE_ENABLED, + SETTING_REPLICATION_TYPE, + ReplicationType.SEGMENT + ) + ) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java index e52a12f66cff4..7683651e902b2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/CreateRemoteIndexIT.java @@ -26,14 +26,10 @@ import static org.hamcrest.Matchers.containsString; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY; -import static org.opensearch.index.IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_REPOSITORY_SETTING; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING; -import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; -import static org.opensearch.indices.IndicesService.CLUSTER_SETTING_REPLICATION_TYPE; +import static org.opensearch.index.IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) @@ -50,10 +46,7 @@ public void teardown() { protected Settings nodeSettings(int nodeOriginal) { Settings settings = super.nodeSettings(nodeOriginal); Settings.Builder builder = Settings.builder() - .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) - .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") - .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") + .put(remoteStoreClusterSettings("my-segment-repo-1", "my-translog-repo-1")) .put(settings); return builder.build(); } @@ -111,19 +104,20 @@ public void testRemoteStoreDisabledByUser() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(SETTING_REMOTE_STORE_ENABLED, false) .build(); - assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()); - GetIndexResponse getIndexResponse = client().admin() - .indices() - .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true)) - .get(); - Settings indexSettings = getIndexResponse.settings().get("test-idx-1"); - verifyRemoteStoreIndexSettings( - indexSettings, - "false", - null, - null, - client().settings().get(CLUSTER_SETTING_REPLICATION_TYPE), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL + + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> 
client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get() + ); + assertThat( + exc.getMessage(), + containsString( + String.format( + Locale.ROOT, + "Validation Failed: 1: private index setting [%s] can not be set explicitly;", + SETTING_REMOTE_STORE_ENABLED + ) + ) ); } @@ -161,8 +155,8 @@ public void testRemoteStoreEnabledByUserWithoutRemoteRepoIllegalArgumentExceptio containsString( String.format( Locale.ROOT, - "Setting %s should be provided with non-empty repository ID", - SETTING_REMOTE_SEGMENT_STORE_REPOSITORY + "Validation Failed: 1: private index setting [%s] can not be set explicitly;", + SETTING_REMOTE_STORE_ENABLED ) ) ); @@ -174,19 +168,21 @@ public void testReplicationTypeDocumentByUser() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT) .build(); - assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()); - GetIndexResponse getIndexResponse = client().admin() - .indices() - .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true)) - .get(); - Settings indexSettings = getIndexResponse.settings().get("test-idx-1"); - verifyRemoteStoreIndexSettings( - indexSettings, - null, - null, - null, - ReplicationType.DOCUMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get() + ); + assertThat( + exc.getMessage(), + containsString( + String.format( + Locale.ROOT, + "To enable %s, %s should be set to %s", + SETTING_REMOTE_STORE_ENABLED, + SETTING_REPLICATION_TYPE, + ReplicationType.SEGMENT + ) + ) ); } @@ -213,7 +209,7 @@ public void testRemoteStoreSegmentRepoWithoutRemoteEnabledAndSegmentReplicationI ); } - public void testRemoteStoreEnabledByUserWithRemoteRepo() throws Exception { + public void testRemoteStoreEnabledByUserWithRemoteRepoIllegalArgumentException() throws Exception { Settings settings = Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) @@ -222,19 +218,20 @@ public void testRemoteStoreEnabledByUserWithRemoteRepo() throws Exception { .put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-custom-repo") .build(); - assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()); - GetIndexResponse getIndexResponse = client().admin() - .indices() - .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true)) - .get(); - Settings indexSettings = getIndexResponse.settings().get("test-idx-1"); - verifyRemoteStoreIndexSettings( - indexSettings, - "true", - "my-custom-repo", - "my-translog-repo-1", - ReplicationType.SEGMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get() + ); + assertThat( + exc.getMessage(), + containsString( + String.format( + Locale.ROOT, + "Validation Failed: 1: private index setting [%s] can not be set explicitly;2: private index setting [%s] can not be set explicitly;", + SETTING_REMOTE_STORE_ENABLED, + SETTING_REMOTE_SEGMENT_STORE_REPOSITORY + ) + ) ); } @@ -270,41 +267,21 @@ public void testRemoteStoreOverrideTranslogRepoCorrectly() throws Exception { .put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-custom-repo") 
.put(SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, "my-custom-repo") .build(); - assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()); - GetIndexResponse getIndexResponse = client().admin() - .indices() - .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true)) - .get(); - Settings indexSettings = getIndexResponse.settings().get("test-idx-1"); - verifyRemoteStoreIndexSettings( - indexSettings, - "true", - "my-custom-repo", - "my-custom-repo", - ReplicationType.SEGMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL + IllegalArgumentException exc = expectThrows( + IllegalArgumentException.class, + () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get() ); - } - - public void testRemoteStoreOverrideReplicationTypeIndexSettings() throws Exception { - Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT) - .build(); - assertAcked(client().admin().indices().prepareCreate("test-idx-1").setSettings(settings).get()); - GetIndexResponse getIndexResponse = client().admin() - .indices() - .getIndex(new GetIndexRequest().indices("test-idx-1").includeDefaults(true)) - .get(); - Settings indexSettings = getIndexResponse.settings().get("test-idx-1"); - verifyRemoteStoreIndexSettings( - indexSettings, - null, - null, - null, - ReplicationType.DOCUMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL + assertThat( + exc.getMessage(), + containsString( + String.format( + Locale.ROOT, + "Validation Failed: 1: private index setting [%s] can not be set explicitly;2: private index setting [%s] can not be set explicitly;3: private index setting [%s] can not be set explicitly;", + SETTING_REMOTE_STORE_ENABLED, + SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, + SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY + ) + ) ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java index 9d63c9b528314..ee32c880257d1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/PrimaryTermValidationIT.java @@ -61,6 +61,7 @@ public void testPrimaryTermValidation() throws Exception { .put(FollowersChecker.FOLLOWER_CHECK_TIMEOUT_SETTING.getKey(), "1s") .put(FollowersChecker.FOLLOWER_CHECK_INTERVAL_SETTING.getKey(), "1s") .put(FollowersChecker.FOLLOWER_CHECK_RETRY_COUNT_SETTING.getKey(), 1) + .put(remoteStoreClusterSettings(REPOSITORY_NAME, REPOSITORY_2_NAME, true)) .build(); internalCluster().startClusterManagerOnlyNode(clusterSettings); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java index 4f7961cec22d7..d92ac83544a25 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java @@ -23,15 +23,21 @@ import java.nio.file.Path; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; 
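Taken together, the CreateRemoteIndexIT changes above capture the behavior change of this patch: the remote store index settings (index.remote_store.enabled and the segment/translog repository settings) are now private, so an index-creation request that sets them explicitly is rejected, and remote store is instead configured once at the cluster level via the remoteStoreClusterSettings(...) helper now used in node settings. A sketch of the request shape the updated tests expect to fail, with placeholder index and repository names, is:

    // Sketch only: index and repository names are placeholders.
    Settings userSettings = Settings.builder()
        .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
        .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
        .put(SETTING_REMOTE_STORE_ENABLED, true)                         // private setting -> rejected
        .put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-custom-repo")  // private setting -> rejected
        .build();
    IllegalArgumentException e = expectThrows(
        IllegalArgumentException.class,
        () -> client().admin().indices().prepareCreate("test-idx-1").setSettings(userSettings).get()
    );
    // The validation message names each offending key, e.g.
    // "private index setting [index.remote_store.enabled] can not be set explicitly".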
@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class RemoteIndexRecoveryIT extends IndexRecoveryIT { - protected static final String REPOSITORY_NAME = "test-remore-store-repo"; + protected static final String REPOSITORY_NAME = "test-remote-store-repo"; protected Path absolutePath; + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(remoteStoreClusterSettings(REPOSITORY_NAME)).build(); + } + @Override protected Settings featureFlagSettings() { return Settings.builder() @@ -57,9 +63,6 @@ public Settings indexSettings() { return Settings.builder() .put(super.indexSettings()) .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s") .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .build(); @@ -81,7 +84,91 @@ protected int numDocs() { } @Override - protected boolean shouldAssertOngoingRecoveryInRerouteRecovery() { - return false; + public void testUsesFileBasedRecoveryIfRetentionLeaseMissing() { + // Retention lease based tests not applicable for remote store; + } + + @Override + public void testPeerRecoveryTrimsLocalTranslog() { + // Peer recovery usecase not valid for remote enabled indices + } + + @Override + public void testHistoryRetention() { + // History retention not applicable for remote store + } + + @Override + public void testUsesFileBasedRecoveryIfOperationsBasedRecoveryWouldBeUnreasonable() { + // History retention not applicable for remote store + } + + @Override + public void testUsesFileBasedRecoveryIfRetentionLeaseAheadOfGlobalCheckpoint() { + // History retention not applicable for remote store + } + + @Override + public void testRecoverLocallyUpToGlobalCheckpoint() { + // History retention not applicable for remote store + } + + @Override + public void testCancelNewShardRecoveryAndUsesExistingShardCopy() { + // History retention not applicable for remote store + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testReservesBytesDuringPeerRecoveryPhaseOne() { + + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testAllocateEmptyPrimaryResetsGlobalCheckpoint() { + + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testDoesNotCopyOperationsInSafeCommit() { + + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testRepeatedRecovery() { + + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testDisconnectsWhileRecovering() { + + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testTransientErrorsDuringRecoveryAreRetried() { + + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testDoNotInfinitelyWaitForMapping() { + + } + + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testDisconnectsDuringRecovery() { + + } + + 
@AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/8919") + @Override + public void testReplicaRecovery() { + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java new file mode 100644 index 0000000000000..8c33bf36ad45d --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java @@ -0,0 +1,530 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.remotestore; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.action.DocWriteResponse; +import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest; +import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.opensearch.action.admin.indices.get.GetIndexRequest; +import org.opensearch.action.admin.indices.get.GetIndexResponse; +import org.opensearch.action.delete.DeleteResponse; +import org.opensearch.action.support.PlainActionFuture; +import org.opensearch.client.Client; +import org.opensearch.client.Requests; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.io.PathUtils; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.index.IndexSettings; +import org.opensearch.indices.replication.common.ReplicationType; +import org.opensearch.snapshots.AbstractSnapshotIntegTestCase; +import org.opensearch.snapshots.SnapshotState; +import org.opensearch.test.InternalTestCluster; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY; +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +public class RemoteRestoreSnapshotIT extends AbstractSnapshotIntegTestCase { + private static final String BASE_REMOTE_REPO = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX; + private Path remoteRepoPath; + + @Before + public void setup() { + remoteRepoPath = randomRepoPath().toAbsolutePath(); + createRepository(BASE_REMOTE_REPO, "fs", remoteRepoPath); + } + + @After + public void teardown() { + assertAcked(clusterAdmin().prepareDeleteRepository(BASE_REMOTE_REPO)); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(FeatureFlags.REMOTE_STORE, "true") + .put(remoteStoreClusterSettings(BASE_REMOTE_REPO)) + .build(); + } + + private Settings.Builder getIndexSettings(int numOfShards, int numOfReplicas) { + Settings.Builder settingsBuilder = Settings.builder() + .put(super.indexSettings()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numOfShards) + 
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numOfReplicas) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s"); + return settingsBuilder; + } + + private void indexDocuments(Client client, String indexName, int numOfDocs) { + indexDocuments(client, indexName, 0, numOfDocs); + } + + private void indexDocuments(Client client, String indexName, int fromId, int toId) { + for (int i = fromId; i < toId; i++) { + String id = Integer.toString(i); + client.prepareIndex(indexName).setId(id).setSource("text", "sometext").get(); + } + client.admin().indices().prepareFlush(indexName).get(); + } + + private void assertDocsPresentInIndex(Client client, String indexName, int numOfDocs) { + for (int i = 0; i < numOfDocs; i++) { + String id = Integer.toString(i); + logger.info("checking for index " + indexName + " with docId" + id); + assertTrue("doc with id" + id + " is not present for index " + indexName, client.prepareGet(indexName, id).get().isExists()); + } + } + + public void testRestoreOperationsShallowCopyEnabled() throws IOException, ExecutionException, InterruptedException { + String clusterManagerNode = internalCluster().startClusterManagerOnlyNode(); + String primary = internalCluster().startDataOnlyNode(); + String indexName1 = "testindex1"; + String indexName2 = "testindex2"; + String snapshotRepoName = "test-restore-snapshot-repo"; + String snapshotName1 = "test-restore-snapshot1"; + String snapshotName2 = "test-restore-snapshot2"; + Path absolutePath1 = randomRepoPath().toAbsolutePath(); + logger.info("Snapshot Path [{}]", absolutePath1); + String restoredIndexName1 = indexName1 + "-restored"; + String restoredIndexName1Seg = indexName1 + "-restored-seg"; + String restoredIndexName1Doc = indexName1 + "-restored-doc"; + String restoredIndexName2 = indexName2 + "-restored"; + + createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, true)); + + Client client = client(); + Settings indexSettings = getIndexSettings(1, 0).build(); + createIndex(indexName1, indexSettings); + + Settings indexSettings2 = getIndexSettings(1, 0).build(); + createIndex(indexName2, indexSettings2); + + final int numDocsInIndex1 = 5; + final int numDocsInIndex2 = 6; + indexDocuments(client, indexName1, numDocsInIndex1); + indexDocuments(client, indexName2, numDocsInIndex2); + ensureGreen(indexName1, indexName2); + + internalCluster().startDataOnlyNode(); + logger.info("--> snapshot"); + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(true) + .setIndices(indexName1, indexName2) + .get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat( + createSnapshotResponse.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) + ); + assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false)); + CreateSnapshotResponse createSnapshotResponse2 = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepoName, snapshotName2) + .setWaitForCompletion(true) + .setIndices(indexName1, indexName2) + .get(); + assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat( + createSnapshotResponse2.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards()) + ); + 
assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet(); + assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED); + indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5)); + ensureGreen(indexName1); + + RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(false) + .setIndices(indexName1) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1) + .get(); + RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName2) + .setWaitForCompletion(false) + .setIndices(indexName2) + .setRenamePattern(indexName2) + .setRenameReplacement(restoredIndexName2) + .get(); + assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED); + assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED); + ensureGreen(restoredIndexName1, restoredIndexName2); + assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1); + assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2); + + // deleting data for restoredIndexName1 and restoring from remote store. + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary)); + ensureRed(restoredIndexName1); + // Re-initialize client to make sure we are not using client from stopped node. + client = client(clusterManagerNode); + assertAcked(client.admin().indices().prepareClose(restoredIndexName1)); + client.admin() + .cluster() + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true), + PlainActionFuture.newFuture() + ); + ensureYellowAndNoInitializingShards(restoredIndexName1); + ensureGreen(restoredIndexName1); + assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1); + // indexing some new docs and validating + indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2); + ensureGreen(restoredIndexName1); + assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2); + + // restore index as seg rep enabled with remote store and remote translog disabled + RestoreSnapshotResponse restoreSnapshotResponse3 = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(false) + .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED) + .setIndices(indexName1) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1Seg) + .get(); + assertEquals(restoreSnapshotResponse3.status(), RestStatus.ACCEPTED); + ensureGreen(restoredIndexName1Seg); + + GetIndexResponse getIndexResponse = client.admin() + .indices() + .getIndex(new GetIndexRequest().indices(restoredIndexName1Seg).includeDefaults(true)) + .get(); + indexSettings = getIndexResponse.settings().get(restoredIndexName1Seg); + assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED)); + assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null)); + assertEquals(ReplicationType.SEGMENT.toString(), indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE)); + assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1); + // indexing some new docs and validating + indexDocuments(client, restoredIndexName1Seg, numDocsInIndex1, numDocsInIndex1 + 2); + 
ensureGreen(restoredIndexName1Seg); + assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1 + 2); + + // restore index as doc rep based from shallow copy snapshot + RestoreSnapshotResponse restoreSnapshotResponse4 = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(false) + .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, IndexMetadata.SETTING_REPLICATION_TYPE) + .setIndices(indexName1) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1Doc) + .get(); + assertEquals(restoreSnapshotResponse4.status(), RestStatus.ACCEPTED); + ensureGreen(restoredIndexName1Doc); + + getIndexResponse = client.admin() + .indices() + .getIndex(new GetIndexRequest().indices(restoredIndexName1Doc).includeDefaults(true)) + .get(); + indexSettings = getIndexResponse.settings().get(restoredIndexName1Doc); + assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED)); + assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null)); + assertNull(indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE)); + assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1); + // indexing some new docs and validating + indexDocuments(client, restoredIndexName1Doc, numDocsInIndex1, numDocsInIndex1 + 2); + ensureGreen(restoredIndexName1Doc); + assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1 + 2); + } + + public void testRestoreInSameRemoteStoreEnabledIndex() throws IOException { + String clusterManagerNode = internalCluster().startClusterManagerOnlyNode(); + String primary = internalCluster().startDataOnlyNode(); + String indexName1 = "testindex1"; + String indexName2 = "testindex2"; + String snapshotRepoName = "test-restore-snapshot-repo"; + String snapshotName1 = "test-restore-snapshot1"; + String snapshotName2 = "test-restore-snapshot2"; + Path absolutePath1 = randomRepoPath().toAbsolutePath(); + logger.info("Snapshot Path [{}]", absolutePath1); + String restoredIndexName2 = indexName2 + "-restored"; + + boolean enableShallowCopy = randomBoolean(); + createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, enableShallowCopy)); + + Client client = client(); + Settings indexSettings = getIndexSettings(1, 0).build(); + createIndex(indexName1, indexSettings); + + Settings indexSettings2 = getIndexSettings(1, 0).build(); + createIndex(indexName2, indexSettings2); + + final int numDocsInIndex1 = 5; + final int numDocsInIndex2 = 6; + indexDocuments(client, indexName1, numDocsInIndex1); + indexDocuments(client, indexName2, numDocsInIndex2); + ensureGreen(indexName1, indexName2); + + internalCluster().startDataOnlyNode(); + logger.info("--> snapshot"); + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(true) + .setIndices(indexName1, indexName2) + .get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat( + createSnapshotResponse.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) + ); + assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false)); + CreateSnapshotResponse createSnapshotResponse2 = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepoName, snapshotName2) + .setWaitForCompletion(true) + 
.setIndices(indexName1, indexName2) + .get(); + assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat( + createSnapshotResponse2.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards()) + ); + assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet(); + assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED); + indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5)); + ensureGreen(indexName1); + + assertAcked(client().admin().indices().prepareClose(indexName1)); + + RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(false) + .setIndices(indexName1) + .get(); + RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName2) + .setWaitForCompletion(false) + .setIndices(indexName2) + .setRenamePattern(indexName2) + .setRenameReplacement(restoredIndexName2) + .get(); + assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED); + assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED); + ensureGreen(indexName1, restoredIndexName2); + assertDocsPresentInIndex(client, indexName1, numDocsInIndex1); + assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2); + + // deleting data for restoredIndexName1 and restoring from remote store. + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary)); + ensureRed(indexName1); + // Re-initialize client to make sure we are not using client from stopped node.
+ client = client(clusterManagerNode); + assertAcked(client.admin().indices().prepareClose(indexName1)); + client.admin() + .cluster() + .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indexName1).restoreAllShards(true), PlainActionFuture.newFuture()); + ensureYellowAndNoInitializingShards(indexName1); + ensureGreen(indexName1); + assertDocsPresentInIndex(client(), indexName1, numDocsInIndex1); + // indexing some new docs and validating + indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + 2); + ensureGreen(indexName1); + assertDocsPresentInIndex(client, indexName1, numDocsInIndex1 + 2); + } + + public void testRestoreShallowCopySnapshotWithDifferentRepo() throws IOException { + String clusterManagerNode = internalCluster().startClusterManagerOnlyNode(); + String primary = internalCluster().startDataOnlyNode(); + String indexName1 = "testindex1"; + String indexName2 = "testindex2"; + String snapshotRepoName = "test-restore-snapshot-repo"; + String remoteStoreRepo2Name = "test-rs-repo-2" + TEST_REMOTE_STORE_REPO_SUFFIX; + String snapshotName1 = "test-restore-snapshot1"; + Path absolutePath1 = randomRepoPath().toAbsolutePath(); + Path absolutePath3 = randomRepoPath().toAbsolutePath(); + String restoredIndexName1 = indexName1 + "-restored"; + + createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false)); + createRepository(remoteStoreRepo2Name, "fs", absolutePath3); + + Client client = client(); + Settings indexSettings = getIndexSettings(1, 0).build(); + createIndex(indexName1, indexSettings); + + Settings indexSettings2 = getIndexSettings(1, 0).build(); + createIndex(indexName2, indexSettings2); + + final int numDocsInIndex1 = 5; + final int numDocsInIndex2 = 6; + indexDocuments(client, indexName1, numDocsInIndex1); + indexDocuments(client, indexName2, numDocsInIndex2); + ensureGreen(indexName1, indexName2); + + internalCluster().startDataOnlyNode(); + + logger.info("--> snapshot"); + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(true) + .setIndices(indexName1, indexName2) + .get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat( + createSnapshotResponse.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) + ); + assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + Settings remoteStoreIndexSettings = Settings.builder() + .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo2Name) + .build(); + // restore index as a remote store index with different remote store repo + RestoreSnapshotResponse restoreSnapshotResponse = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(false) + .setIndexSettings(remoteStoreIndexSettings) + .setIndices(indexName1) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1) + .get(); + assertEquals(restoreSnapshotResponse.status(), RestStatus.ACCEPTED); + ensureGreen(restoredIndexName1); + assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1); + + // deleting data for restoredIndexName1 and restoring from remote store. + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary)); + // Re-initialize client to make sure we are not using client from stopped node.
+ client = client(clusterManagerNode); + assertAcked(client.admin().indices().prepareClose(restoredIndexName1)); + client.admin() + .cluster() + .restoreRemoteStore( + new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true), + PlainActionFuture.newFuture() + ); + ensureYellowAndNoInitializingShards(restoredIndexName1); + ensureGreen(restoredIndexName1); + // indexing some new docs and validating + assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1); + indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2); + ensureGreen(restoredIndexName1); + assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2); + } + + public void testRestoreShallowSnapshotRepositoryOverriden() throws ExecutionException, InterruptedException { + String indexName1 = "testindex1"; + String snapshotRepoName = "test-restore-snapshot-repo"; + String remoteStoreRepoNameUpdated = "test-rs-repo-updated" + TEST_REMOTE_STORE_REPO_SUFFIX; + String snapshotName1 = "test-restore-snapshot1"; + Path absolutePath1 = randomRepoPath().toAbsolutePath(); + Path absolutePath2 = randomRepoPath().toAbsolutePath(); + String[] pathTokens = absolutePath1.toString().split("/"); + String basePath = pathTokens[pathTokens.length - 1]; + Arrays.copyOf(pathTokens, pathTokens.length - 1); + Path location = PathUtils.get(String.join("/", pathTokens)); + pathTokens = absolutePath2.toString().split("/"); + String basePath2 = pathTokens[pathTokens.length - 1]; + Arrays.copyOf(pathTokens, pathTokens.length - 1); + Path location2 = PathUtils.get(String.join("/", pathTokens)); + logger.info("Path 1 [{}]", absolutePath1); + logger.info("Path 2 [{}]", absolutePath2); + String restoredIndexName1 = indexName1 + "-restored"; + + createRepository(snapshotRepoName, "fs", getRepositorySettings(location, basePath, true)); + + Client client = client(); + Settings indexSettings = Settings.builder() + .put(super.indexSettings()) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s") + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + createIndex(indexName1, indexSettings); + + int numDocsInIndex1 = randomIntBetween(2, 5); + indexDocuments(client, indexName1, numDocsInIndex1); + + ensureGreen(indexName1); + + logger.info("--> snapshot"); + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(true) + .setIndices(indexName1) + .get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertThat( + createSnapshotResponse.getSnapshotInfo().successfulShards(), + equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) + ); + assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); + + createRepository(BASE_REMOTE_REPO, "fs", absolutePath2); + + RestoreSnapshotResponse restoreSnapshotResponse = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(true) + .setIndices(indexName1) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1) + .get(); + + assertTrue(restoreSnapshotResponse.getRestoreInfo().failedShards() > 0); + + ensureRed(restoredIndexName1); + + client().admin().indices().close(Requests.closeIndexRequest(restoredIndexName1)).get(); + 
createRepository(remoteStoreRepoNameUpdated, "fs", remoteRepoPath); + RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin() + .cluster() + .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) + .setWaitForCompletion(true) + .setIndices(indexName1) + .setRenamePattern(indexName1) + .setRenameReplacement(restoredIndexName1) + .setSourceRemoteStoreRepository(remoteStoreRepoNameUpdated) + .get(); + + assertTrue(restoreSnapshotResponse2.getRestoreInfo().failedShards() == 0); + ensureGreen(restoredIndexName1); + assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1); + + // indexing some new docs and validating + indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2); + ensureGreen(restoredIndexName1); + assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2); + } + +} diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java index 2887fbc56106c..4a85ff46d9025 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java @@ -29,11 +29,15 @@ import java.util.List; import java.util.concurrent.atomic.AtomicInteger; +import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; public class RemoteStoreBaseIntegTestCase extends OpenSearchIntegTestCase { - protected static final String REPOSITORY_NAME = "test-remore-store-repo"; - protected static final String REPOSITORY_2_NAME = "test-remore-store-repo-2"; + protected static final String REPOSITORY_NAME = "test-remote-store-repo"; + protected static final String REPOSITORY_2_NAME = "test-remote-store-repo-2"; protected static final int SHARD_COUNT = 1; protected static final int REPLICA_COUNT = 1; protected Path absolutePath; @@ -51,6 +55,14 @@ protected boolean addMockInternalEngine() { return false; } + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(remoteStoreClusterSettings(REPOSITORY_NAME, REPOSITORY_2_NAME, true)) + .build(); + } + @Override protected Settings featureFlagSettings() { return Settings.builder() @@ -64,21 +76,41 @@ public Settings indexSettings() { return defaultIndexSettings(); } - IndexResponse indexSingleDoc(String indexName) { + protected IndexResponse indexSingleDoc(String indexName) { return client().prepareIndex(indexName) .setId(UUIDs.randomBase64UUID()) .setSource(documentKeys.get(randomIntBetween(0, documentKeys.size() - 1)), randomAlphaOfLength(5)) .get(); } + public static Settings remoteStoreClusterSettings(String segmentRepoName) { + return remoteStoreClusterSettings(segmentRepoName, segmentRepoName); + } + + public static Settings remoteStoreClusterSettings( + String segmentRepoName, + String translogRepoName, + boolean randomizeSameRepoForRSSAndRTS + ) { + return remoteStoreClusterSettings( + segmentRepoName, + randomizeSameRepoForRSSAndRTS ? (randomBoolean() ? 
translogRepoName : segmentRepoName) : translogRepoName + ); + } + + public static Settings remoteStoreClusterSettings(String segmentRepoName, String translogRepoName) { + return Settings.builder() + .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) + .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) + .put(CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey(), segmentRepoName) + .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), translogRepoName) + .build(); + } + private Settings defaultIndexSettings() { - boolean sameRepoForRSSAndRTS = randomBoolean(); return Settings.builder() .put(super.indexSettings()) .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, sameRepoForRSSAndRTS ? REPOSITORY_NAME : REPOSITORY_2_NAME) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, SHARD_COUNT) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, REPLICA_COUNT) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s") diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java index 6764c50175e61..549b4985894a7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java @@ -39,11 +39,7 @@ public void setup() { @Override public Settings indexSettings() { - return Settings.builder() - .put(super.indexSettings()) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, shard_count) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME) - .build(); + return Settings.builder().put(super.indexSettings()).put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, shard_count).build(); } public void testPromoteReplicaToPrimary() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java index f298fac7c894e..6f76c21cc0411 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreIT.java @@ -10,7 +10,6 @@ import org.junit.After; import org.junit.Before; -import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.FeatureFlags; import org.opensearch.indices.replication.SegmentReplicationIT; @@ -18,6 +17,7 @@ import java.nio.file.Path; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; /** @@ -32,13 +32,12 @@ public class SegmentReplicationUsingRemoteStoreIT extends SegmentReplicationIT { private static final String REPOSITORY_NAME = "test-remote-store-repo"; @Override - public Settings indexSettings() { - return Settings.builder() - .put(super.indexSettings()) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME) - 
.put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME) - .build(); + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(remoteStoreClusterSettings(REPOSITORY_NAME)).build(); + } + + protected boolean segmentReplicationWithRemoteEnabled() { + return true; } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java index 0b64680033d84..38db7a7c7269e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationWithRemoteStorePressureIT.java @@ -10,15 +10,14 @@ import org.junit.After; import org.junit.Before; -import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.SegmentReplicationPressureIT; -import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.OpenSearchIntegTestCase; import java.nio.file.Path; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; /** @@ -32,14 +31,8 @@ public class SegmentReplicationWithRemoteStorePressureIT extends SegmentReplicat private static final String REPOSITORY_NAME = "test-remote-store-repo"; @Override - public Settings indexSettings() { - return Settings.builder() - .put(super.indexSettings()) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME) - .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .build(); + protected boolean segmentReplicationWithRemoteEnabled() { + return true; } @Override @@ -51,6 +44,11 @@ protected Settings featureFlagSettings() { .build(); } + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(remoteStoreClusterSettings(REPOSITORY_NAME)).build(); + } + @Before public void setup() { internalCluster().startClusterManagerOnlyNode(); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java index 529e84d281476..b801c28983890 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/multipart/RemoteStoreMultipartFileCorruptionIT.java @@ -8,31 +8,22 @@ package org.opensearch.remotestore.multipart; -import org.junit.After; import org.junit.Before; -import org.opensearch.action.index.IndexResponse; import org.opensearch.action.support.IndicesOptions; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.UUIDs; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexModule; import org.opensearch.indices.replication.common.ReplicationType; import 
org.opensearch.plugins.Plugin; -import org.opensearch.remotestore.multipart.mocks.MockFsRepository; +import org.opensearch.remotestore.RemoteStoreBaseIntegTestCase; import org.opensearch.remotestore.multipart.mocks.MockFsRepositoryPlugin; -import org.opensearch.test.OpenSearchIntegTestCase; -import java.nio.file.Path; import java.util.Collection; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; +public class RemoteStoreMultipartFileCorruptionIT extends RemoteStoreBaseIntegTestCase { -public class RemoteStoreMultipartFileCorruptionIT extends OpenSearchIntegTestCase { - - protected static final String REPOSITORY_NAME = "test-remore-store-repo"; private static final String INDEX_NAME = "remote-store-test-idx-1"; @Override @@ -40,34 +31,9 @@ protected Collection> nodePlugins() { return Stream.concat(super.nodePlugins().stream(), Stream.of(MockFsRepositoryPlugin.class)).collect(Collectors.toList()); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.REMOTE_STORE, "true").build(); - } - @Before public void setup() { - internalCluster().startClusterManagerOnlyNode(); - Path absolutePath = randomRepoPath().toAbsolutePath(); - putRepository(absolutePath); - } - - protected void putRepository(Path path) { - assertAcked( - clusterAdmin().preparePutRepository(REPOSITORY_NAME) - .setType(MockFsRepositoryPlugin.TYPE) - .setSettings( - Settings.builder() - .put("location", path) - // custom setting for MockFsRepositoryPlugin - .put(MockFsRepository.TRIGGER_DATA_INTEGRITY_FAILURE.getKey(), true) - ) - ); - } - - @After - public void teardown() { - assertAcked(clusterAdmin().prepareDeleteRepository(REPOSITORY_NAME)); + setupRepo(); } protected Settings remoteStoreIndexSettings() { @@ -78,26 +44,16 @@ protected Settings remoteStoreIndexSettings() { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME) .build(); } - private IndexResponse indexSingleDoc() { - return client().prepareIndex(INDEX_NAME) - .setId(UUIDs.randomBase64UUID()) - .setSource(randomAlphaOfLength(5), randomAlphaOfLength(5)) - .get(); - } - public void testLocalFileCorruptionDuringUpload() { internalCluster().startDataOnlyNodes(1); createIndex(INDEX_NAME, remoteStoreIndexSettings()); ensureYellowAndNoInitializingShards(INDEX_NAME); ensureGreen(INDEX_NAME); - indexSingleDoc(); + indexSingleDoc(INDEX_NAME); client().admin() .indices() diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java index 3de982f89ac80..d19cb513ed38d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/CloneSnapshotIT.java @@ -54,6 +54,7 @@ import java.util.Collection; import java.util.List; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; @@ 
-162,7 +163,8 @@ public void testCloneSnapshotIndex() throws Exception { public void testCloneShallowSnapshotIndex() throws Exception { disableRepoConsistencyCheck("This test uses remote store repository"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + final String remoteStoreRepoName = "remote-store-repo-name"; + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(remoteStoreRepoName)); internalCluster().startDataOnlyNode(); final String snapshotRepoName = "snapshot-repo-name"; @@ -174,14 +176,13 @@ public void testCloneShallowSnapshotIndex() throws Exception { createRepository(shallowSnapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy(shallowSnapshotRepoPath)); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); final String indexName = "index-1"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); @@ -209,7 +210,10 @@ public void testCloneShallowSnapshotIndex() throws Exception { public void testShallowCloneNameAvailability() throws Exception { disableRepoConsistencyCheck("This test uses remote store repository"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS); + final String remoteStoreRepoName = "remote-store-repo-name"; + internalCluster().startClusterManagerOnlyNode( + Settings.builder().put(LARGE_SNAPSHOT_POOL_SETTINGS).put(remoteStoreClusterSettings(remoteStoreRepoName)).build() + ); internalCluster().startDataOnlyNode(); final String shallowSnapshotRepoName = "shallow-snapshot-repo-name"; @@ -217,14 +221,13 @@ public void testShallowCloneNameAvailability() throws Exception { createRepository(shallowSnapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy(shallowSnapshotRepoPath)); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); final String indexName = "index-1"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); @@ -244,7 +247,8 @@ public void testShallowCloneNameAvailability() throws Exception { public void testCloneAfterRepoShallowSettingEnabled() throws Exception { disableRepoConsistencyCheck("This test uses remote store repository"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + final String remoteStoreRepoName = "remote-store-repo-name"; + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(remoteStoreRepoName)); internalCluster().startDataOnlyNode(); final String 
snapshotRepoName = "snapshot-repo-name"; @@ -252,14 +256,13 @@ public void testCloneAfterRepoShallowSettingEnabled() throws Exception { createRepository(snapshotRepoName, "fs", snapshotRepoPath); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); final String indexName = "index-1"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); @@ -281,7 +284,8 @@ public void testCloneAfterRepoShallowSettingEnabled() throws Exception { public void testCloneAfterRepoShallowSettingDisabled() throws Exception { disableRepoConsistencyCheck("This test uses remote store repository"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + final String remoteStoreRepoName = "remote-store-repo-name"; + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(remoteStoreRepoName)); internalCluster().startDataOnlyNode(); final String snapshotRepoName = "snapshot-repo-name"; @@ -289,14 +293,13 @@ public void testCloneAfterRepoShallowSettingDisabled() throws Exception { createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy(snapshotRepoPath)); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); final String indexName = "index-1"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java index 2688449294f3d..b12fbdd2a9bd7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java @@ -25,15 +25,18 @@ import java.util.stream.Stream; import static org.hamcrest.Matchers.is; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class DeleteSnapshotIT extends AbstractSnapshotIntegTestCase { + private static final String REMOTE_REPO_NAME = "remote-store-repo-name"; + public void testDeleteSnapshot() throws Exception { disableRepoConsistencyCheck("Remote store repository is being used in the test"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + 
internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME)); internalCluster().startDataOnlyNode(); final String snapshotRepoName = "snapshot-repo-name"; @@ -41,20 +44,19 @@ public void testDeleteSnapshot() throws Exception { createRepository(snapshotRepoName, "fs", snapshotRepoPath); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); + createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath); final String indexName = "index-1"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); final String snapshot = "snapshot"; createFullSnapshot(snapshotRepoName, snapshot); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 0); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 0); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 1); assertAcked(startDeleteSnapshot(snapshotRepoName, snapshot).get()); @@ -64,32 +66,31 @@ public void testDeleteSnapshot() throws Exception { public void testDeleteShallowCopySnapshot() throws Exception { disableRepoConsistencyCheck("Remote store repository is being used in the test"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME)); internalCluster().startDataOnlyNode(); final String snapshotRepoName = "snapshot-repo-name"; createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy()); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); + createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath); final String indexName = "index-1"; createIndexWithRandomDocs(indexName, randomIntBetween(5, 10)); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); final String shallowSnapshot = "shallow-snapshot"; createFullSnapshot(snapshotRepoName, shallowSnapshot); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 1); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 1); assertAcked(startDeleteSnapshot(snapshotRepoName, shallowSnapshot).get()); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 0); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 0); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 0); } // 
Deleting multiple shallow copy snapshots as part of single delete call with repo having only shallow copy snapshots. @@ -97,23 +98,22 @@ public void testDeleteMultipleShallowCopySnapshotsCase1() throws Exception { disableRepoConsistencyCheck("Remote store repository is being used in the test"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME)); internalCluster().startDataOnlyNode(); final Client clusterManagerClient = internalCluster().clusterManagerClient(); ensureStableCluster(2); + final Path remoteStoreRepoPath = randomRepoPath(); + createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath); + final String snapshotRepoName = "snapshot-repo-name"; final Path snapshotRepoPath = randomRepoPath(); createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy(snapshotRepoPath)); final String testIndex = "index-test"; createIndexWithContent(testIndex); - final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); - final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); @@ -122,7 +122,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase1() throws Exception { List shallowCopySnapshots = createNSnapshots(snapshotRepoName, totalShallowCopySnapshotsCount); List snapshotsToBeDeleted = shallowCopySnapshots.subList(0, randomIntBetween(2, totalShallowCopySnapshotsCount)); int tobeDeletedSnapshotsCount = snapshotsToBeDeleted.size(); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalShallowCopySnapshotsCount); // Deleting subset of shallow copy snapshots assertAcked( @@ -132,7 +132,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase1() throws Exception { .get() ); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalShallowCopySnapshotsCount - tobeDeletedSnapshotsCount); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount - tobeDeletedSnapshotsCount); } @@ -144,7 +144,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception { disableRepoConsistencyCheck("Remote store repository is being used in the test"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME)); final String dataNode = internalCluster().startDataOnlyNode(); ensureStableCluster(2); final String clusterManagerNode = internalCluster().getClusterManagerName(); @@ -156,11 +156,10 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws 
Exception { createIndexWithContent(testIndex); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); + createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); @@ -201,7 +200,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception { int totalSnapshotsCount = totalFullCopySnapshotsCount + totalShallowCopySnapshotsCount; - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount); // Deleting subset of shallow copy snapshots assertAcked( @@ -213,7 +212,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception { totalSnapshotsCount -= tobeDeletedShallowCopySnapshotsCount; totalShallowCopySnapshotsCount -= tobeDeletedShallowCopySnapshotsCount; assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount); // Deleting subset of full copy snapshots assertAcked( @@ -224,7 +223,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase2() throws Exception { ); totalSnapshotsCount -= tobeDeletedFullCopySnapshotsCount; assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount); } // Deleting subset of shallow and full copy snapshots as part of single delete call and then deleting all snapshots in the repo. 
@@ -233,7 +232,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception { disableRepoConsistencyCheck("Remote store repository is being used in the test"); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - internalCluster().startClusterManagerOnlyNode(); + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME)); internalCluster().startDataOnlyNode(); final Client clusterManagerClient = internalCluster().clusterManagerClient(); ensureStableCluster(2); @@ -245,11 +244,10 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception { createIndexWithContent(testIndex); final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); + createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath); final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); @@ -268,7 +266,7 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception { int totalSnapshotsCount = totalFullCopySnapshotsCount + totalShallowCopySnapshotsCount; - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount); // Deleting subset of shallow copy snapshots and full copy snapshots assertAcked( @@ -283,12 +281,12 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception { totalSnapshotsCount -= (tobeDeletedShallowCopySnapshotsCount + tobeDeletedFullCopySnapshotsCount); totalShallowCopySnapshotsCount -= tobeDeletedShallowCopySnapshotsCount; assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == totalSnapshotsCount); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == totalShallowCopySnapshotsCount); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == totalShallowCopySnapshotsCount); // Deleting all the remaining snapshots assertAcked(clusterManagerClient.admin().cluster().prepareDeleteSnapshot(snapshotRepoName, "*").get()); assert (getRepositoryData(snapshotRepoName).getSnapshotIds().size() == 0); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 0); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 0); } private List createNSnapshots(String repoName, int count) { diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java new file mode 100644 index 0000000000000..b6a5188c99335 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java @@ -0,0 +1,209 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under 
the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.snapshots; + +import org.opensearch.action.ActionFuture; +import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.opensearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStage; +import org.opensearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus; +import org.opensearch.action.admin.cluster.snapshots.status.SnapshotStatus; +import org.opensearch.cluster.SnapshotsInProgress; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.threadpool.ThreadPool; + +import java.nio.file.Path; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; + +public class RemoteIndexSnapshotStatusApiIT extends AbstractSnapshotIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING.getKey(), 0) // We have tests that check by-timestamp order + .put(FeatureFlags.REMOTE_STORE, "true") + .put(remoteStoreClusterSettings("remote-store-repo-name")) + .build(); + } + + public void testStatusAPICallForShallowCopySnapshot() throws Exception { + disableRepoConsistencyCheck("Remote store repository is being used for the test"); + internalCluster().startClusterManagerOnlyNode(); + internalCluster().startDataOnlyNode(); + + final String snapshotRepoName = "snapshot-repo-name"; + createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy()); + + final Path remoteStoreRepoPath = randomRepoPath(); + final String remoteStoreRepoName = "remote-store-repo-name"; + createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); + + final String remoteStoreEnabledIndexName = "remote-index-1"; + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); + createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); + ensureGreen(); + + logger.info("--> indexing some data"); + for (int i = 0; i < 100; i++) { + index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i); + } + refresh(); + + final String snapshot = "snapshot"; + createFullSnapshot(snapshotRepoName, snapshot); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1); + + final SnapshotStatus snapshotStatus = 
getSnapshotStatus(snapshotRepoName, snapshot); + assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS)); + + // Validating that the incremental file count and incremental file size is zero for shallow copy + final SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName); + assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE)); + assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), greaterThan(0)); + assertThat(shallowSnapshotShardState.getStats().getTotalSize(), greaterThan(0L)); + assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), is(0)); + assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), is(0L)); + } + + public void testStatusAPIStatsForBackToBackShallowSnapshot() throws Exception { + disableRepoConsistencyCheck("Remote store repository is being used for the test"); + internalCluster().startClusterManagerOnlyNode(); + internalCluster().startDataOnlyNode(); + + final String snapshotRepoName = "snapshot-repo-name"; + createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy()); + + final Path remoteStoreRepoPath = randomRepoPath(); + final String remoteStoreRepoName = "remote-store-repo-name"; + createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); + + final String remoteStoreEnabledIndexName = "remote-index-1"; + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); + createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); + ensureGreen(); + + logger.info("--> indexing some data"); + for (int i = 0; i < 100; i++) { + index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i); + } + refresh(); + + createFullSnapshot(snapshotRepoName, "test-snap-1"); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1); + + SnapshotStatus snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-1"); + assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS)); + + SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName); + assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE)); + final int totalFileCount = shallowSnapshotShardState.getStats().getTotalFileCount(); + final long totalSize = shallowSnapshotShardState.getStats().getTotalSize(); + final int incrementalFileCount = shallowSnapshotShardState.getStats().getIncrementalFileCount(); + final long incrementalSize = shallowSnapshotShardState.getStats().getIncrementalSize(); + + createFullSnapshot(snapshotRepoName, "test-snap-2"); + assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 2); + + snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-2"); + assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS)); + shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName); + assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), equalTo(totalFileCount)); + assertThat(shallowSnapshotShardState.getStats().getTotalSize(), equalTo(totalSize)); + assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), equalTo(incrementalFileCount)); + assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), equalTo(incrementalSize)); + } + + public void testStatusAPICallInProgressShallowSnapshot() throws Exception { 
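        // The repository created below uses the "mock" type with block_on_data, which holds the shallow
        // snapshot in the STARTED state while the data nodes are blocked; the test asserts that the
        // status API reports STARTED for the in-progress snapshot, then unblocks the data nodes and
        // waits for the snapshot to complete.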
+ disableRepoConsistencyCheck("Remote store repository is being used for the test"); + internalCluster().startClusterManagerOnlyNode(); + internalCluster().startDataOnlyNode(); + + final String snapshotRepoName = "snapshot-repo-name"; + createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy().put("block_on_data", true)); + + final Path remoteStoreRepoPath = randomRepoPath(); + final String remoteStoreRepoName = "remote-store-repo-name"; + createRepository(remoteStoreRepoName, "mock", remoteStoreRepoPath); + + final String remoteStoreEnabledIndexName = "remote-index-1"; + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); + createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); + ensureGreen(); + + logger.info("--> indexing some data"); + for (int i = 0; i < 100; i++) { + index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i); + } + refresh(); + + logger.info("--> snapshot"); + ActionFuture createSnapshotResponseActionFuture = startFullSnapshot(snapshotRepoName, "test-snap"); + + logger.info("--> wait for data nodes to get blocked"); + awaitNumberOfSnapshotsInProgress(1); + assertEquals( + SnapshotsInProgress.State.STARTED, + client().admin() + .cluster() + .prepareSnapshotStatus(snapshotRepoName) + .setSnapshots("test-snap") + .get() + .getSnapshots() + .get(0) + .getState() + ); + + logger.info("--> unblock all data nodes"); + unblockAllDataNodes(snapshotRepoName); + + logger.info("--> wait for snapshot to finish"); + createSnapshotResponseActionFuture.actionGet(); + } + + private static SnapshotIndexShardStatus stateFirstShard(SnapshotStatus snapshotStatus, String indexName) { + return snapshotStatus.getIndices().get(indexName).getShards().get(0); + } + + private static SnapshotStatus getSnapshotStatus(String repoName, String snapshotName) { + try { + return client().admin().cluster().prepareSnapshotStatus(repoName).setSnapshots(snapshotName).get().getSnapshots().get(0); + } catch (SnapshotMissingException e) { + throw new AssertionError(e); + } + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java index 8677e61efeb46..0274ca874c7b9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RestoreSnapshotIT.java @@ -33,45 +33,30 @@ package org.opensearch.snapshots; import org.opensearch.action.ActionFuture; -import org.opensearch.action.DocWriteResponse; -import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest; import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; -import org.opensearch.action.admin.indices.get.GetIndexRequest; -import org.opensearch.action.admin.indices.get.GetIndexResponse; import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse; import org.opensearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder; import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.opensearch.action.delete.DeleteResponse; import org.opensearch.action.index.IndexRequestBuilder; -import org.opensearch.action.support.PlainActionFuture; import org.opensearch.client.Client; -import org.opensearch.client.Requests; import 
org.opensearch.cluster.block.ClusterBlocks; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; -import org.opensearch.index.IndexSettings; import org.opensearch.indices.InvalidIndexNameException; -import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.repositories.RepositoriesService; -import org.opensearch.test.InternalTestCluster; - -import java.io.IOException; import java.nio.file.Path; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Arrays; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -86,8 +71,6 @@ import static org.hamcrest.Matchers.nullValue; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY; import static org.opensearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.opensearch.index.query.QueryBuilders.matchQuery; @@ -174,494 +157,6 @@ public void testParallelRestoreOperations() { assertThat(client.prepareGet(restoredIndexName2, docId2).get().isExists(), equalTo(true)); } - public void testRestoreRemoteStoreIndicesWithRemoteTranslog() throws IOException, ExecutionException, InterruptedException { - testRestoreOperationsShallowCopyEnabled(); - } - - public void testRestoreOperationsShallowCopyEnabled() throws IOException, ExecutionException, InterruptedException { - String clusterManagerNode = internalCluster().startClusterManagerOnlyNode(); - String primary = internalCluster().startDataOnlyNode(); - String indexName1 = "testindex1"; - String indexName2 = "testindex2"; - String snapshotRepoName = "test-restore-snapshot-repo"; - String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX; - String snapshotName1 = "test-restore-snapshot1"; - String snapshotName2 = "test-restore-snapshot2"; - Path absolutePath1 = randomRepoPath().toAbsolutePath(); - Path absolutePath2 = randomRepoPath().toAbsolutePath(); - logger.info("Snapshot Path [{}]", absolutePath1); - logger.info("Remote Store Repo Path [{}]", absolutePath2); - String restoredIndexName1 = indexName1 + "-restored"; - String restoredIndexName1Seg = indexName1 + "-restored-seg"; - String restoredIndexName1Doc = indexName1 + "-restored-doc"; - String restoredIndexName2 = indexName2 + "-restored"; - - createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, true)); - createRepository(remoteStoreRepoName, "fs", absolutePath2); - - Client client = client(); - Settings indexSettings = getIndexSettings(true, remoteStoreRepoName, 1, 0).build(); - createIndex(indexName1, indexSettings); - - Settings indexSettings2 = getIndexSettings(false, null, 1, 0).build(); - createIndex(indexName2, 
indexSettings2); - - final int numDocsInIndex1 = 5; - final int numDocsInIndex2 = 6; - indexDocuments(client, indexName1, numDocsInIndex1); - indexDocuments(client, indexName2, numDocsInIndex2); - ensureGreen(indexName1, indexName2); - - internalCluster().startDataOnlyNode(); - logger.info("--> snapshot"); - CreateSnapshotResponse createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(true) - .setIndices(indexName1, indexName2) - .get(); - assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); - assertThat( - createSnapshotResponse.getSnapshotInfo().successfulShards(), - equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) - ); - assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); - - updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false)); - CreateSnapshotResponse createSnapshotResponse2 = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepoName, snapshotName2) - .setWaitForCompletion(true) - .setIndices(indexName1, indexName2) - .get(); - assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0)); - assertThat( - createSnapshotResponse2.getSnapshotInfo().successfulShards(), - equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards()) - ); - assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); - - DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet(); - assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED); - indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5)); - ensureGreen(indexName1); - - RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(false) - .setIndices(indexName1) - .setRenamePattern(indexName1) - .setRenameReplacement(restoredIndexName1) - .get(); - RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName2) - .setWaitForCompletion(false) - .setIndices(indexName2) - .setRenamePattern(indexName2) - .setRenameReplacement(restoredIndexName2) - .get(); - assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED); - assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED); - ensureGreen(restoredIndexName1, restoredIndexName2); - assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1); - assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2); - - // deleting data for restoredIndexName1 and restoring from remote store. - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary)); - ensureRed(restoredIndexName1); - // Re-initialize client to make sure we are not using client from stopped node. 
- client = client(clusterManagerNode); - assertAcked(client.admin().indices().prepareClose(restoredIndexName1)); - client.admin() - .cluster() - .restoreRemoteStore( - new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true), - PlainActionFuture.newFuture() - ); - ensureYellowAndNoInitializingShards(restoredIndexName1); - ensureGreen(restoredIndexName1); - assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1); - // indexing some new docs and validating - indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2); - ensureGreen(restoredIndexName1); - assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2); - - // restore index as seg rep enabled with remote store and remote translog disabled - RestoreSnapshotResponse restoreSnapshotResponse3 = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(false) - .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED) - .setIndices(indexName1) - .setRenamePattern(indexName1) - .setRenameReplacement(restoredIndexName1Seg) - .get(); - assertEquals(restoreSnapshotResponse3.status(), RestStatus.ACCEPTED); - ensureGreen(restoredIndexName1Seg); - - GetIndexResponse getIndexResponse = client.admin() - .indices() - .getIndex(new GetIndexRequest().indices(restoredIndexName1Seg).includeDefaults(true)) - .get(); - indexSettings = getIndexResponse.settings().get(restoredIndexName1Seg); - assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED)); - assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null)); - assertEquals(ReplicationType.SEGMENT.toString(), indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE)); - assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1); - // indexing some new docs and validating - indexDocuments(client, restoredIndexName1Seg, numDocsInIndex1, numDocsInIndex1 + 2); - ensureGreen(restoredIndexName1Seg); - assertDocsPresentInIndex(client, restoredIndexName1Seg, numDocsInIndex1 + 2); - - // restore index as doc rep based from shallow copy snapshot - RestoreSnapshotResponse restoreSnapshotResponse4 = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(false) - .setIgnoreIndexSettings(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, IndexMetadata.SETTING_REPLICATION_TYPE) - .setIndices(indexName1) - .setRenamePattern(indexName1) - .setRenameReplacement(restoredIndexName1Doc) - .get(); - assertEquals(restoreSnapshotResponse4.status(), RestStatus.ACCEPTED); - ensureGreen(restoredIndexName1Doc); - - getIndexResponse = client.admin() - .indices() - .getIndex(new GetIndexRequest().indices(restoredIndexName1Doc).includeDefaults(true)) - .get(); - indexSettings = getIndexResponse.settings().get(restoredIndexName1Doc); - assertNull(indexSettings.get(SETTING_REMOTE_STORE_ENABLED)); - assertNull(indexSettings.get(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, null)); - assertNull(indexSettings.get(IndexMetadata.SETTING_REPLICATION_TYPE)); - assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1); - // indexing some new docs and validating - indexDocuments(client, restoredIndexName1Doc, numDocsInIndex1, numDocsInIndex1 + 2); - ensureGreen(restoredIndexName1Doc); - assertDocsPresentInIndex(client, restoredIndexName1Doc, numDocsInIndex1 + 2); - } - - public void testRestoreInSameRemoteStoreEnabledIndex() throws IOException { - String clusterManagerNode = 
internalCluster().startClusterManagerOnlyNode(); - String primary = internalCluster().startDataOnlyNode(); - String indexName1 = "testindex1"; - String indexName2 = "testindex2"; - String snapshotRepoName = "test-restore-snapshot-repo"; - String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX; - String snapshotName1 = "test-restore-snapshot1"; - String snapshotName2 = "test-restore-snapshot2"; - Path absolutePath1 = randomRepoPath().toAbsolutePath(); - Path absolutePath2 = randomRepoPath().toAbsolutePath(); - logger.info("Snapshot Path [{}]", absolutePath1); - logger.info("Remote Store Repo Path [{}]", absolutePath2); - String restoredIndexName2 = indexName2 + "-restored"; - - boolean enableShallowCopy = randomBoolean(); - createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, enableShallowCopy)); - createRepository(remoteStoreRepoName, "fs", absolutePath2); - - Client client = client(); - Settings indexSettings = getIndexSettings(true, remoteStoreRepoName, 1, 0).build(); - createIndex(indexName1, indexSettings); - - Settings indexSettings2 = getIndexSettings(false, null, 1, 0).build(); - createIndex(indexName2, indexSettings2); - - final int numDocsInIndex1 = 5; - final int numDocsInIndex2 = 6; - indexDocuments(client, indexName1, numDocsInIndex1); - indexDocuments(client, indexName2, numDocsInIndex2); - ensureGreen(indexName1, indexName2); - - internalCluster().startDataOnlyNode(); - logger.info("--> snapshot"); - CreateSnapshotResponse createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(true) - .setIndices(indexName1, indexName2) - .get(); - assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); - assertThat( - createSnapshotResponse.getSnapshotInfo().successfulShards(), - equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) - ); - assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); - - updateRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false)); - CreateSnapshotResponse createSnapshotResponse2 = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepoName, snapshotName2) - .setWaitForCompletion(true) - .setIndices(indexName1, indexName2) - .get(); - assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0)); - assertThat( - createSnapshotResponse2.getSnapshotInfo().successfulShards(), - equalTo(createSnapshotResponse2.getSnapshotInfo().totalShards()) - ); - assertThat(createSnapshotResponse2.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); - - DeleteResponse deleteResponse = client().prepareDelete(indexName1, "0").execute().actionGet(); - assertEquals(deleteResponse.getResult(), DocWriteResponse.Result.DELETED); - indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + randomIntBetween(2, 5)); - ensureGreen(indexName1); - - assertAcked(client().admin().indices().prepareClose(indexName1)); - - RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(false) - .setIndices(indexName1) - .get(); - RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName2) - .setWaitForCompletion(false) - .setIndices(indexName2) - .setRenamePattern(indexName2) - .setRenameReplacement(restoredIndexName2) - .get(); - 
assertEquals(restoreSnapshotResponse1.status(), RestStatus.ACCEPTED); - assertEquals(restoreSnapshotResponse2.status(), RestStatus.ACCEPTED); - ensureGreen(indexName1, restoredIndexName2); - assertDocsPresentInIndex(client, indexName1, numDocsInIndex1); - assertDocsPresentInIndex(client, restoredIndexName2, numDocsInIndex2); - - // deleting data for restoredIndexName1 and restoring from remote store. - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary)); - ensureRed(indexName1); - // Re-initialize client to make sure we are not using client from stopped node. - client = client(clusterManagerNode); - assertAcked(client.admin().indices().prepareClose(indexName1)); - client.admin() - .cluster() - .restoreRemoteStore(new RestoreRemoteStoreRequest().indices(indexName1).restoreAllShards(true), PlainActionFuture.newFuture()); - ensureYellowAndNoInitializingShards(indexName1); - ensureGreen(indexName1); - assertDocsPresentInIndex(client(), indexName1, numDocsInIndex1); - // indexing some new docs and validating - indexDocuments(client, indexName1, numDocsInIndex1, numDocsInIndex1 + 2); - ensureGreen(indexName1); - assertDocsPresentInIndex(client, indexName1, numDocsInIndex1 + 2); - } - - public void testRestoreShallowCopySnapshotWithDifferentRepo() throws IOException { - String clusterManagerNode = internalCluster().startClusterManagerOnlyNode(); - String primary = internalCluster().startDataOnlyNode(); - String indexName1 = "testindex1"; - String indexName2 = "testindex2"; - String snapshotRepoName = "test-restore-snapshot-repo"; - String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX; - String remoteStoreRepo2Name = "test-rs-repo-2" + TEST_REMOTE_STORE_REPO_SUFFIX; - String snapshotName1 = "test-restore-snapshot1"; - Path absolutePath1 = randomRepoPath().toAbsolutePath(); - Path absolutePath2 = randomRepoPath().toAbsolutePath(); - Path absolutePath3 = randomRepoPath().toAbsolutePath(); - String restoredIndexName1 = indexName1 + "-restored"; - - createRepository(snapshotRepoName, "fs", getRepositorySettings(absolutePath1, false)); - createRepository(remoteStoreRepoName, "fs", absolutePath2); - createRepository(remoteStoreRepo2Name, "fs", absolutePath3); - - Client client = client(); - Settings indexSettings = getIndexSettings(true, remoteStoreRepoName, 1, 0).build(); - createIndex(indexName1, indexSettings); - - Settings indexSettings2 = getIndexSettings(false, null, 1, 0).build(); - createIndex(indexName2, indexSettings2); - - final int numDocsInIndex1 = 5; - final int numDocsInIndex2 = 6; - indexDocuments(client, indexName1, numDocsInIndex1); - indexDocuments(client, indexName2, numDocsInIndex2); - ensureGreen(indexName1, indexName2); - - internalCluster().startDataOnlyNode(); - - logger.info("--> snapshot"); - CreateSnapshotResponse createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(true) - .setIndices(indexName1, indexName2) - .get(); - assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); - assertThat( - createSnapshotResponse.getSnapshotInfo().successfulShards(), - equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) - ); - assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); - - Settings remoteStoreIndexSettings = Settings.builder() - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo2Name) - .build(); - // restore index as a remote store index with 
different remote store repo - RestoreSnapshotResponse restoreSnapshotResponse = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(false) - .setIndexSettings(remoteStoreIndexSettings) - .setIndices(indexName1) - .setRenamePattern(indexName1) - .setRenameReplacement(restoredIndexName1) - .get(); - assertEquals(restoreSnapshotResponse.status(), RestStatus.ACCEPTED); - ensureGreen(restoredIndexName1); - assertDocsPresentInIndex(client(), restoredIndexName1, numDocsInIndex1); - - // deleting data for restoredIndexName1 and restoring from remote store. - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primary)); - // Re-initialize client to make sure we are not using client from stopped node. - client = client(clusterManagerNode); - assertAcked(client.admin().indices().prepareClose(restoredIndexName1)); - client.admin() - .cluster() - .restoreRemoteStore( - new RestoreRemoteStoreRequest().indices(restoredIndexName1).restoreAllShards(true), - PlainActionFuture.newFuture() - ); - ensureYellowAndNoInitializingShards(restoredIndexName1); - ensureGreen(restoredIndexName1); - // indexing some new docs and validating - assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1); - indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2); - ensureGreen(restoredIndexName1); - assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2); - } - - private Settings.Builder getIndexSettings(boolean enableRemoteStore, String remoteStoreRepo, int numOfShards, int numOfReplicas) { - Settings.Builder settingsBuilder = Settings.builder() - .put(super.indexSettings()) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numOfShards) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numOfReplicas); - if (enableRemoteStore) { - settingsBuilder.put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, remoteStoreRepo) - .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s") - .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT); - } - return settingsBuilder; - } - - public void testRestoreShallowSnapshotRepositoryOverriden() throws ExecutionException, InterruptedException { - String indexName1 = "testindex1"; - String snapshotRepoName = "test-restore-snapshot-repo"; - String remoteStoreRepoName = "test-rs-repo" + TEST_REMOTE_STORE_REPO_SUFFIX; - String remoteStoreRepoNameUpdated = "test-rs-repo-updated" + TEST_REMOTE_STORE_REPO_SUFFIX; - String snapshotName1 = "test-restore-snapshot1"; - Path absolutePath1 = randomRepoPath().toAbsolutePath(); - Path absolutePath2 = randomRepoPath().toAbsolutePath(); - Path absolutePath3 = randomRepoPath().toAbsolutePath(); - String[] pathTokens = absolutePath1.toString().split("/"); - String basePath = pathTokens[pathTokens.length - 1]; - Arrays.copyOf(pathTokens, pathTokens.length - 1); - Path location = PathUtils.get(String.join("/", pathTokens)); - pathTokens = absolutePath2.toString().split("/"); - String basePath2 = pathTokens[pathTokens.length - 1]; - Arrays.copyOf(pathTokens, pathTokens.length - 1); - Path location2 = PathUtils.get(String.join("/", pathTokens)); - logger.info("Path 1 [{}]", absolutePath1); - logger.info("Path 2 [{}]", absolutePath2); - logger.info("Path 3 [{}]", absolutePath3); - String restoredIndexName1 = indexName1 + "-restored"; - - createRepository(snapshotRepoName, "fs", 
getRepositorySettings(location, basePath, true)); - createRepository(remoteStoreRepoName, "fs", absolutePath3); - - Client client = client(); - Settings indexSettings = Settings.builder() - .put(super.indexSettings()) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepoName) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, remoteStoreRepoName) - .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "300s") - .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .build(); - createIndex(indexName1, indexSettings); - - int numDocsInIndex1 = randomIntBetween(2, 5); - indexDocuments(client, indexName1, numDocsInIndex1); - - ensureGreen(indexName1); - - logger.info("--> snapshot"); - CreateSnapshotResponse createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(true) - .setIndices(indexName1) - .get(); - assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); - assertThat( - createSnapshotResponse.getSnapshotInfo().successfulShards(), - equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()) - ); - assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.SUCCESS)); - - createRepository(remoteStoreRepoName, "fs", absolutePath2); - - RestoreSnapshotResponse restoreSnapshotResponse = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(true) - .setIndices(indexName1) - .setRenamePattern(indexName1) - .setRenameReplacement(restoredIndexName1) - .get(); - - assertTrue(restoreSnapshotResponse.getRestoreInfo().failedShards() > 0); - - ensureRed(restoredIndexName1); - - client().admin().indices().close(Requests.closeIndexRequest(restoredIndexName1)).get(); - createRepository(remoteStoreRepoNameUpdated, "fs", absolutePath3); - RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin() - .cluster() - .prepareRestoreSnapshot(snapshotRepoName, snapshotName1) - .setWaitForCompletion(true) - .setIndices(indexName1) - .setRenamePattern(indexName1) - .setRenameReplacement(restoredIndexName1) - .setSourceRemoteStoreRepository(remoteStoreRepoNameUpdated) - .get(); - - assertTrue(restoreSnapshotResponse2.getRestoreInfo().failedShards() == 0); - ensureGreen(restoredIndexName1); - assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1); - - // indexing some new docs and validating - indexDocuments(client, restoredIndexName1, numDocsInIndex1, numDocsInIndex1 + 2); - ensureGreen(restoredIndexName1); - assertDocsPresentInIndex(client, restoredIndexName1, numDocsInIndex1 + 2); - } - - private void indexDocuments(Client client, String indexName, int numOfDocs) { - indexDocuments(client, indexName, 0, numOfDocs); - } - - private void indexDocuments(Client client, String indexName, int fromId, int toId) { - for (int i = fromId; i < toId; i++) { - String id = Integer.toString(i); - client.prepareIndex(indexName).setId(id).setSource("text", "sometext").get(); - } - client.admin().indices().prepareFlush(indexName).get(); - } - - private void assertDocsPresentInIndex(Client client, String indexName, int numOfDocs) { - for (int i = 0; i < numOfDocs; i++) { - String id = Integer.toString(i); - logger.info("checking for index " + indexName + " with docId" + id); - assertTrue("doc with id" + 
id + " is not present for index " + indexName, client.prepareGet(indexName, id).get().isExists()); - } - } - public void testParallelRestoreOperationsFromSingleSnapshot() throws Exception { String indexName1 = "testindex1"; String indexName2 = "testindex2"; diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java index e47a2b94fc715..e02a5b95da400 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SnapshotStatusApisIT.java @@ -112,7 +112,7 @@ public void testStatusApiConsistency() { assertEquals(snStatus.getStats().getTime(), snapshotInfo.endTime() - snapshotInfo.startTime()); } - public void testStatusAPICallForShallowCopySnapshot() throws Exception { + public void testStatusAPICallForShallowCopySnapshot() { disableRepoConsistencyCheck("Remote store repository is being used for the test"); internalCluster().startClusterManagerOnlyNode(); internalCluster().startDataOnlyNode(); @@ -120,10 +120,6 @@ public void testStatusAPICallForShallowCopySnapshot() throws Exception { final String snapshotRepoName = "snapshot-repo-name"; createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy()); - final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); - final String indexName = "index-1"; createIndex(indexName); ensureGreen(); @@ -133,20 +129,8 @@ public void testStatusAPICallForShallowCopySnapshot() throws Exception { } refresh(); - final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); - createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); - ensureGreen(); - - logger.info("--> indexing some data"); - for (int i = 0; i < 100; i++) { - index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i); - } - refresh(); - final String snapshot = "snapshot"; createFullSnapshot(snapshotRepoName, snapshot); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1); final SnapshotStatus snapshotStatus = getSnapshotStatus(snapshotRepoName, snapshot); assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS)); @@ -157,14 +141,6 @@ public void testStatusAPICallForShallowCopySnapshot() throws Exception { assertThat(snapshotShardState.getStats().getTotalSize(), greaterThan(0L)); assertThat(snapshotShardState.getStats().getIncrementalFileCount(), greaterThan(0)); assertThat(snapshotShardState.getStats().getIncrementalSize(), greaterThan(0L)); - - // Validating that the incremental file count and incremental file size is zero for shallow copy - final SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName); - assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE)); - assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), greaterThan(0)); - assertThat(shallowSnapshotShardState.getStats().getTotalSize(), greaterThan(0L)); - assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), is(0)); - assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), is(0L)); } public void 
testStatusAPICallInProgressSnapshot() throws Exception { @@ -245,63 +221,6 @@ public void testExceptionOnMissingShardLevelSnapBlob() throws IOException { ); } - public void testStatusAPIStatsForBackToBackShallowSnapshot() throws Exception { - disableRepoConsistencyCheck("Remote store repository is being used for the test"); - internalCluster().startClusterManagerOnlyNode(); - internalCluster().startDataOnlyNode(); - - final String snapshotRepoName = "snapshot-repo-name"; - createRepository(snapshotRepoName, "fs", snapshotRepoSettingsForShallowCopy()); - - final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "fs", remoteStoreRepoPath); - - final String indexName = "index-1"; - createIndex(indexName); - ensureGreen(); - logger.info("--> indexing some data"); - for (int i = 0; i < 100; i++) { - index(indexName, "_doc", Integer.toString(i), "foo", "bar" + i); - } - refresh(); - - final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); - createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); - ensureGreen(); - - logger.info("--> indexing some data"); - for (int i = 0; i < 100; i++) { - index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i); - } - refresh(); - - createFullSnapshot(snapshotRepoName, "test-snap-1"); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 1); - - SnapshotStatus snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-1"); - assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS)); - - SnapshotIndexShardStatus shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName); - assertThat(shallowSnapshotShardState.getStage(), is(SnapshotIndexShardStage.DONE)); - final int totalFileCount = shallowSnapshotShardState.getStats().getTotalFileCount(); - final long totalSize = shallowSnapshotShardState.getStats().getTotalSize(); - final int incrementalFileCount = shallowSnapshotShardState.getStats().getIncrementalFileCount(); - final long incrementalSize = shallowSnapshotShardState.getStats().getIncrementalSize(); - - createFullSnapshot(snapshotRepoName, "test-snap-2"); - assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, remoteStoreRepoName).length == 2); - - snapshotStatus = getSnapshotStatus(snapshotRepoName, "test-snap-2"); - assertThat(snapshotStatus.getState(), is(SnapshotsInProgress.State.SUCCESS)); - shallowSnapshotShardState = stateFirstShard(snapshotStatus, remoteStoreEnabledIndexName); - assertThat(shallowSnapshotShardState.getStats().getTotalFileCount(), equalTo(totalFileCount)); - assertThat(shallowSnapshotShardState.getStats().getTotalSize(), equalTo(totalSize)); - assertThat(shallowSnapshotShardState.getStats().getIncrementalFileCount(), equalTo(incrementalFileCount)); - assertThat(shallowSnapshotShardState.getStats().getIncrementalSize(), equalTo(incrementalSize)); - } - public void testGetSnapshotsWithoutIndices() throws Exception { createRepository("test-repo", "fs"); @@ -441,17 +360,12 @@ public void testSnapshotStatusOnFailedSnapshot() throws Exception { } public void testStatusAPICallInProgressShallowSnapshot() throws Exception { - disableRepoConsistencyCheck("Remote store repository is being used for the test"); internalCluster().startClusterManagerOnlyNode(); 
internalCluster().startDataOnlyNode(); final String snapshotRepoName = "snapshot-repo-name"; createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy().put("block_on_data", true)); - final Path remoteStoreRepoPath = randomRepoPath(); - final String remoteStoreRepoName = "remote-store-repo-name"; - createRepository(remoteStoreRepoName, "mock", remoteStoreRepoPath); - final String indexName = "index-1"; createIndex(indexName); ensureGreen(); @@ -461,17 +375,6 @@ public void testStatusAPICallInProgressShallowSnapshot() throws Exception { } refresh(); - final String remoteStoreEnabledIndexName = "remote-index-1"; - final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepoName); - createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); - ensureGreen(); - - logger.info("--> indexing some data"); - for (int i = 0; i < 100; i++) { - index(remoteStoreEnabledIndexName, "_doc", Integer.toString(i), "foo", "bar" + i); - } - refresh(); - logger.info("--> snapshot"); ActionFuture createSnapshotResponseActionFuture = startFullSnapshot(snapshotRepoName, "test-snap"); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java index bcf9cbd0efef8..267abbbd6b6fe 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java @@ -82,6 +82,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.function.Function; @@ -285,6 +286,32 @@ public Iterator> settings() { SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT.toString(), ReplicationType::parseString, + new Setting.Validator<>() { + + @Override + public void validate(final ReplicationType value) {} + + @Override + public void validate(final ReplicationType value, final Map, Object> settings) { + final Object remoteStoreEnabled = settings.get(INDEX_REMOTE_STORE_ENABLED_SETTING); + if (ReplicationType.SEGMENT.equals(value) == false && Objects.equals(remoteStoreEnabled, true)) { + throw new IllegalArgumentException( + "To enable " + + INDEX_REMOTE_STORE_ENABLED_SETTING.getKey() + + ", " + + INDEX_REPLICATION_TYPE_SETTING.getKey() + + " should be set to " + + ReplicationType.SEGMENT + ); + } + } + + @Override + public Iterator> settings() { + final List> settings = List.of(INDEX_REMOTE_STORE_ENABLED_SETTING); + return settings.iterator(); + } + }, Property.IndexScope, Property.Final ); @@ -328,13 +355,14 @@ public Iterator> settings() { } }, Property.IndexScope, - Property.Final + Property.PrivateIndex, + Property.Dynamic ); /** * Used to specify remote store repository to use for this index. 
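 * The value must be a non-empty repository ID, and the accompanying validator only accepts it when
 * remote store is enabled for the index.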
*/ - public static final Setting INDEX_REMOTE_STORE_REPOSITORY_SETTING = Setting.simpleString( + public static final Setting INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING = Setting.simpleString( SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, new Setting.Validator<>() { @@ -345,10 +373,12 @@ public void validate(final String value) {} public void validate(final String value, final Map, Object> settings) { if (value == null || value.isEmpty()) { throw new IllegalArgumentException( - "Setting " + INDEX_REMOTE_STORE_REPOSITORY_SETTING.getKey() + " should be provided with non-empty repository ID" + "Setting " + + INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey() + + " should be provided with non-empty repository ID" ); } else { - validateRemoteStoreSettingEnabled(settings, INDEX_REMOTE_STORE_REPOSITORY_SETTING); + validateRemoteStoreSettingEnabled(settings, INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING); } } @@ -359,7 +389,8 @@ public Iterator> settings() { } }, Property.IndexScope, - Property.Final + Property.PrivateIndex, + Property.Dynamic ); private static void validateRemoteStoreSettingEnabled(final Map, Object> settings, Setting setting) { @@ -409,7 +440,8 @@ public Iterator> settings() { } }, Property.IndexScope, - Property.Final + Property.PrivateIndex, + Property.Dynamic ); public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index 728bac647d74a..db9964b1a2ff8 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -91,7 +91,6 @@ import org.opensearch.indices.InvalidIndexNameException; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.indices.SystemIndices; -import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.threadpool.ThreadPool; import java.io.IOException; @@ -105,7 +104,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -120,9 +118,6 @@ import static java.util.stream.Collectors.toList; import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING; import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING; -import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_REMOTE_STORE_ENABLED_SETTING; -import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_REMOTE_STORE_REPOSITORY_SETTING; -import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_REMOTE_TRANSLOG_REPOSITORY_SETTING; import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_REPLICATION_TYPE_SETTING; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_CREATION_DATE; @@ -134,7 +129,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; import static org.opensearch.cluster.metadata.Metadata.DEFAULT_REPLICA_COUNT_SETTING; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_REPOSITORY_SETTING; +import static 
org.opensearch.indices.IndicesService.CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; @@ -892,7 +887,7 @@ static Settings aggregateIndexSettings( indexSettingsBuilder.put(SETTING_INDEX_UUID, UUIDs.randomBase64UUID()); updateReplicationStrategy(indexSettingsBuilder, request.settings(), settings); - updateRemoteStoreSettings(indexSettingsBuilder, request.settings(), settings); + updateRemoteStoreSettings(indexSettingsBuilder, settings); if (sourceMetadata != null) { assert request.resizeType() != null; @@ -947,53 +942,16 @@ private static void updateReplicationStrategy(Settings.Builder settingsBuilder, /** * Updates index settings to enable remote store by default based on cluster level settings * @param settingsBuilder index settings builder to be updated with relevant settings - * @param requestSettings settings passed in during index create request * @param clusterSettings cluster level settings */ - private static void updateRemoteStoreSettings(Settings.Builder settingsBuilder, Settings requestSettings, Settings clusterSettings) { - if (CLUSTER_REMOTE_STORE_ENABLED_SETTING.get(clusterSettings)) { - // Verify if we can create a remote store based index based on user provided settings - if (canCreateRemoteStoreIndex(requestSettings) == false) { - return; - } - - // Verify index has replication type as SEGMENT - if (ReplicationType.DOCUMENT.equals(ReplicationType.parseString(settingsBuilder.get(SETTING_REPLICATION_TYPE)))) { - throw new IllegalArgumentException( - "Cannot enable [" - + SETTING_REMOTE_STORE_ENABLED - + "] when [" - + SETTING_REPLICATION_TYPE - + "] is " - + ReplicationType.DOCUMENT - ); - } - - settingsBuilder.put(SETTING_REMOTE_STORE_ENABLED, true); - String remoteStoreRepo; - if (Objects.equals(requestSettings.get(INDEX_REMOTE_STORE_ENABLED_SETTING.getKey()), "true")) { - remoteStoreRepo = requestSettings.get(INDEX_REMOTE_STORE_REPOSITORY_SETTING.getKey()); - } else { - remoteStoreRepo = CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.get(clusterSettings); - } - settingsBuilder.put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo) - .put( - SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, - requestSettings.get( - INDEX_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), - CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.get(clusterSettings) - ) - ); + private static void updateRemoteStoreSettings(Settings.Builder settingsBuilder, Settings clusterSettings) { + if (CLUSTER_REMOTE_STORE_ENABLED_SETTING.get(clusterSettings) == true) { + settingsBuilder.put(SETTING_REMOTE_STORE_ENABLED, true) + .put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.get(clusterSettings)) + .put(SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.get(clusterSettings)); } } - private static boolean canCreateRemoteStoreIndex(Settings requestSettings) { - return (INDEX_REPLICATION_TYPE_SETTING.exists(requestSettings) == false - || INDEX_REPLICATION_TYPE_SETTING.get(requestSettings).equals(ReplicationType.SEGMENT)) - && (INDEX_REMOTE_STORE_ENABLED_SETTING.exists(requestSettings) == false - || INDEX_REMOTE_STORE_ENABLED_SETTING.get(requestSettings)); - } - public static void validateStoreTypeSettings(Settings settings) { // deprecate simplefs store type: if 
(IndexModule.Type.SIMPLEFS.match(IndexModule.INDEX_STORE_TYPE_SETTING.get(settings))) { diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 9da9e1b14d307..802eb7bd01254 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -673,7 +673,7 @@ public void apply(Settings value, Settings current, Settings previous) { List.of(FeatureFlags.REMOTE_STORE), List.of( IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING, - IndicesService.CLUSTER_REMOTE_STORE_REPOSITORY_SETTING, + IndicesService.CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING, IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING ), List.of(FeatureFlags.CONCURRENT_SEGMENT_SEARCH), diff --git a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java index 3cc7c351fe1bf..be2b5f00bc0ec 100644 --- a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java @@ -235,7 +235,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FeatureFlags.REMOTE_STORE, List.of( IndexMetadata.INDEX_REMOTE_STORE_ENABLED_SETTING, - IndexMetadata.INDEX_REMOTE_STORE_REPOSITORY_SETTING, + IndexMetadata.INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING, IndexMetadata.INDEX_REMOTE_TRANSLOG_REPOSITORY_SETTING ), FeatureFlags.CONCURRENT_SEGMENT_SEARCH, diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 36b937e7df76a..7be824d95b421 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -256,7 +256,7 @@ public class IndicesService extends AbstractLifecycleComponent /** * Used to specify default repo to use for segment upload for remote store backed indices */ - public static final Setting CLUSTER_REMOTE_STORE_REPOSITORY_SETTING = Setting.simpleString( + public static final Setting CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING = Setting.simpleString( "cluster.remote_store.segment.repository", "", Property.NodeScope, diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java index 375b1b8ed7aba..aa3e7ab1fb2c7 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/remotestore/stats/TransportRemoteStoreStatsActionTests.java @@ -32,6 +32,7 @@ import org.opensearch.index.remote.RemoteSegmentTransferTracker; import org.opensearch.index.shard.IndexShardTestCase; import org.opensearch.indices.IndicesService; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.FeatureFlagSetter; import org.opensearch.test.transport.MockTransport; import org.opensearch.transport.TransportService; @@ -45,6 +46,7 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_INDEX_UUID; +import static 
org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY; @@ -70,6 +72,7 @@ public void setUp() throws Exception { remoteStoreIndexMetadata = IndexMetadata.builder(INDEX.getName()) .settings( settings(Version.CURRENT).put(SETTING_INDEX_UUID, INDEX.getUUID()) + .put(SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(SETTING_REMOTE_STORE_ENABLED, true) .put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-test-repo") .build() diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index e52237c8dba99..63c3511a97d2b 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -101,6 +101,7 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.Locale; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; @@ -113,7 +114,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; @@ -143,7 +143,7 @@ import static org.opensearch.cluster.metadata.MetadataCreateIndexService.resolveAndValidateAliases; import static org.opensearch.index.IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_REPOSITORY_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; @@ -1191,40 +1191,11 @@ public void testvalidateIndexSettings() { threadPool.shutdown(); } - public void testRemoteStoreNoUserOverrideConflictingReplicationTypeIndexSettings() { - Settings settings = Settings.builder() - .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.DOCUMENT) - .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") - .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") - .build(); - FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - - request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); - IllegalArgumentException exc = expectThrows( - IllegalArgumentException.class, - () -> aggregateIndexSettings( - ClusterState.EMPTY_STATE, - request, - Settings.EMPTY, - null, - settings, - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - randomShardLimitService(), - Collections.emptySet() - ) - ); - assertThat( - exc.getMessage(), - containsString("Cannot enable [index.remote_store.enabled] when [index.replication.type] is DOCUMENT") - ); - } - public void 
testRemoteStoreNoUserOverrideExceptReplicationTypeSegmentIndexSettings() { Settings settings = Settings.builder() .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.DOCUMENT) .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") + .put(CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") .build(); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); @@ -1257,7 +1228,7 @@ public void testRemoteStoreNoUserOverrideIndexSettings() { Settings settings = Settings.builder() .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") + .put(CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") .build(); FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); @@ -1284,137 +1255,103 @@ public void testRemoteStoreNoUserOverrideIndexSettings() { } public void testRemoteStoreDisabledByUserIndexSettings() { - Settings settings = Settings.builder() - .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) - .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") - .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") - .build(); - FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - - request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); final Settings.Builder requestSettings = Settings.builder(); requestSettings.put(SETTING_REMOTE_STORE_ENABLED, false); - request.settings(requestSettings.build()); - Settings indexSettings = aggregateIndexSettings( - ClusterState.EMPTY_STATE, - request, - Settings.EMPTY, - null, - settings, - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - randomShardLimitService(), - Collections.emptySet() - ); - verifyRemoteStoreIndexSettings( - indexSettings, - "false", - null, - null, - ReplicationType.SEGMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL - ); + withTemporaryClusterService(((clusterService, threadPool) -> { + MetadataCreateIndexService checkerService = new MetadataCreateIndexService( + Settings.EMPTY, + clusterService, + null, + null, + null, + createTestShardLimitService(randomIntBetween(1, 1000), false, clusterService), + new Environment(Settings.builder().put("path.home", "dummy").build(), null), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + threadPool, + null, + new SystemIndices(Collections.emptyMap()), + true, + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + ); + + final List validationErrors = checkerService.getIndexSettingsValidationErrors( + requestSettings.build(), + true, + Optional.empty() + ); + assertThat(validationErrors.size(), is(1)); + assertThat( + validationErrors.get(0), + is(String.format(Locale.ROOT, "expected [%s] to be private but it was not", SETTING_REMOTE_STORE_ENABLED)) + ); + })); } public void testRemoteStoreOverrideSegmentRepoIndexSettings() { - Settings settings = Settings.builder() - .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) - .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), 
"my-segment-repo-1") - .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") - .build(); - FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - - request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); final Settings.Builder requestSettings = Settings.builder(); requestSettings.put(SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .put(SETTING_REMOTE_STORE_ENABLED, true) .put(SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-custom-repo"); - request.settings(requestSettings.build()); - Settings indexSettings = aggregateIndexSettings( - ClusterState.EMPTY_STATE, - request, - Settings.EMPTY, - null, - settings, - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - randomShardLimitService(), - Collections.emptySet() - ); - verifyRemoteStoreIndexSettings( - indexSettings, - "true", - "my-custom-repo", - "my-translog-repo-1", - ReplicationType.SEGMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL - ); + withTemporaryClusterService(((clusterService, threadPool) -> { + MetadataCreateIndexService checkerService = new MetadataCreateIndexService( + Settings.EMPTY, + clusterService, + null, + null, + null, + createTestShardLimitService(randomIntBetween(1, 1000), false, clusterService), + new Environment(Settings.builder().put("path.home", "dummy").build(), null), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + threadPool, + null, + new SystemIndices(Collections.emptyMap()), + true, + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + ); + + final List validationErrors = checkerService.getIndexSettingsValidationErrors( + requestSettings.build(), + true, + Optional.empty() + ); + assertThat(validationErrors.size(), is(1)); + assertThat( + validationErrors.get(0), + is(String.format(Locale.ROOT, "expected [%s] to be private but it was not", SETTING_REMOTE_SEGMENT_STORE_REPOSITORY)) + ); + })); } public void testRemoteStoreOverrideTranslogRepoIndexSettings() { - Settings settings = Settings.builder() - .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) - .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") - .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") - .build(); - FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - - request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); final Settings.Builder requestSettings = Settings.builder(); requestSettings.put(SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, "my-custom-repo"); - request.settings(requestSettings.build()); - Settings indexSettings = aggregateIndexSettings( - ClusterState.EMPTY_STATE, - request, - Settings.EMPTY, - null, - settings, - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - randomShardLimitService(), - Collections.emptySet() - ); - verifyRemoteStoreIndexSettings( - indexSettings, - "true", - "my-segment-repo-1", - "my-custom-repo", - ReplicationType.SEGMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL - ); - } - - public void testRemoteStoreOverrideReplicationTypeIndexSettings() { - Settings settings = Settings.builder() - .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) - .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) - .put(CLUSTER_REMOTE_STORE_REPOSITORY_SETTING.getKey(), "my-segment-repo-1") - .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "my-translog-repo-1") - .build(); - 
FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); + withTemporaryClusterService(((clusterService, threadPool) -> { + MetadataCreateIndexService checkerService = new MetadataCreateIndexService( + Settings.EMPTY, + clusterService, + null, + null, + null, + createTestShardLimitService(randomIntBetween(1, 1000), false, clusterService), + new Environment(Settings.builder().put("path.home", "dummy").build(), null), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + threadPool, + null, + new SystemIndices(Collections.emptyMap()), + true, + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + ); - request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); - final Settings.Builder requestSettings = Settings.builder(); - requestSettings.put(SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT); - request.settings(requestSettings.build()); - Settings indexSettings = aggregateIndexSettings( - ClusterState.EMPTY_STATE, - request, - Settings.EMPTY, - null, - settings, - IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - randomShardLimitService(), - Collections.emptySet() - ); - verifyRemoteStoreIndexSettings( - indexSettings, - null, - null, - null, - ReplicationType.DOCUMENT.toString(), - IndexSettings.DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL - ); + final List validationErrors = checkerService.getIndexSettingsValidationErrors( + requestSettings.build(), + true, + Optional.empty() + ); + assertThat(validationErrors.size(), is(1)); + assertThat( + validationErrors.get(0), + is(String.format(Locale.ROOT, "expected [%s] to be private but it was not", SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY)) + ); + })); } public void testBuildIndexMetadata() { diff --git a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java index f91905fea9561..70f43c7ed52e3 100644 --- a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java +++ b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java @@ -779,6 +779,7 @@ public void testRemoteStoreExplicitSetting() { "index", Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) .build() ); @@ -795,22 +796,6 @@ public void testRemoteTranslogStoreDefaultSetting() { assertFalse(settings.isRemoteTranslogStoreEnabled()); } - public void testUpdateRemoteStoreFails() { - Set> remoteStoreSettingSet = new HashSet<>(); - remoteStoreSettingSet.add(IndexMetadata.INDEX_REMOTE_STORE_ENABLED_SETTING); - IndexScopedSettings settings = new IndexScopedSettings(Settings.EMPTY, remoteStoreSettingSet); - SettingsException error = expectThrows( - SettingsException.class, - () -> settings.updateSettings( - Settings.builder().put("index.remote_store.enabled", randomBoolean()).build(), - Settings.builder(), - Settings.builder(), - "index" - ) - ); - assertEquals(error.getMessage(), "final index setting [index.remote_store.enabled], not updateable"); - } - public void testEnablingRemoteStoreFailsWhenReplicationTypeIsDocument() { Settings indexSettings = Settings.builder() .put("index.replication.type", ReplicationType.DOCUMENT) @@ -846,6 +831,7 @@ public void testRemoteRepositoryExplicitSetting() { "index", Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) 
.put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "repo1") .build() @@ -854,25 +840,6 @@ public void testRemoteRepositoryExplicitSetting() { assertEquals("repo1", settings.getRemoteStoreRepository()); } - public void testUpdateRemoteRepositoryFails() { - Set> remoteStoreSettingSet = new HashSet<>(); - remoteStoreSettingSet.add(IndexMetadata.INDEX_REMOTE_STORE_REPOSITORY_SETTING); - IndexScopedSettings settings = new IndexScopedSettings(Settings.EMPTY, remoteStoreSettingSet); - SettingsException error = expectThrows( - SettingsException.class, - () -> settings.updateSettings( - Settings.builder().put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, randomUnicodeOfLength(10)).build(), - Settings.builder(), - Settings.builder(), - "index" - ) - ); - assertEquals( - error.getMessage(), - String.format(Locale.ROOT, "final index setting [%s], not updateable", IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY) - ); - } - public void testSetRemoteRepositoryFailsWhenRemoteStoreIsNotEnabled() { Settings indexSettings = Settings.builder() .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) @@ -881,7 +848,7 @@ public void testSetRemoteRepositoryFailsWhenRemoteStoreIsNotEnabled() { .build(); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, - () -> IndexMetadata.INDEX_REMOTE_STORE_REPOSITORY_SETTING.get(indexSettings) + () -> IndexMetadata.INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.get(indexSettings) ); assertEquals( String.format( @@ -902,7 +869,7 @@ public void testSetRemoteRepositoryFailsWhenEmptyString() { .build(); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, - () -> IndexMetadata.INDEX_REMOTE_STORE_REPOSITORY_SETTING.get(indexSettings) + () -> IndexMetadata.INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.get(indexSettings) ); assertEquals( String.format( diff --git a/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java b/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java index 7b180a71f3bab..8d444f5d10f26 100644 --- a/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java +++ b/server/src/test/java/org/opensearch/index/remote/RemoteRefreshSegmentPressureServiceTests.java @@ -16,6 +16,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.IndexShard; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.index.store.Store; import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.OpenSearchTestCase; @@ -151,7 +152,10 @@ public void testValidateSegmentUploadLag() { } private static IndexShard createIndexShard(ShardId shardId, boolean remoteStoreEnabled) { - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, String.valueOf(remoteStoreEnabled)).build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, String.valueOf(remoteStoreEnabled)) + .build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test_index", settings); Store store = mock(Store.class); IndexShard indexShard = mock(IndexShard.class); diff --git a/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java index 
03a6fc3df824d..5e88b892cc835 100644 --- a/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/opensearch/index/seqno/ReplicationTrackerTests.java @@ -1293,7 +1293,10 @@ public void testGlobalCheckpointUpdateWithRemoteTranslogEnabled() { assertThat(allocations.size(), equalTo(active.size() + initializing.size())); final AllocationId primaryId = active.iterator().next(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO)); @@ -1368,7 +1371,10 @@ public void testUpdateFromClusterManagerWithRemoteTranslogEnabled() { assertThat(allocations.size(), equalTo(active.size() + initializing.size())); final AllocationId primaryId = active.iterator().next(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO)); @@ -1438,7 +1444,10 @@ public void testUpdateFromClusterManagerWithRemoteTranslogEnabled() { */ public void testUpdateGlobalCheckpointOnReplicaWithRemoteTranslogEnabled() { final AllocationId active = AllocationId.newInitializing(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(active, settings); final long globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE - 1); tracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "test"); @@ -1460,7 +1469,10 @@ public void testMarkAllocationIdAsInSyncWithRemoteTranslogEnabled() throws Excep Set initializing = new HashSet<>(initializingWithCheckpoints.keySet()); final AllocationId primaryId = active.iterator().next(); final AllocationId replicaId = initializing.iterator().next(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); tracker.updateFromClusterManager(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId)); final long localCheckpoint = randomLongBetween(0, Long.MAX_VALUE - 1); @@ -1485,7 +1497,10 @@ public void testMissingActiveIdsDoesNotPreventAdvanceWithRemoteTranslogEnabled() assigned.putAll(active); assigned.putAll(initializing); AllocationId primaryId = active.keySet().iterator().next(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + 
.put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); tracker.updateFromClusterManager(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); @@ -1515,7 +1530,10 @@ public void testMissingInSyncIdsDoesNotPreventAdvanceWithRemoteTranslogEnabled() logger.info("active: {}, initializing: {}", active, initializing); AllocationId primaryId = active.keySet().iterator().next(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); tracker.updateFromClusterManager(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); @@ -1540,7 +1558,10 @@ public void testInSyncIdsAreIgnoredIfNotValidatedByClusterManagerWithRemoteTrans final Map initializing = randomAllocationsWithLocalCheckpoints(1, 5); final Map nonApproved = randomAllocationsWithLocalCheckpoints(1, 5); final AllocationId primaryId = active.keySet().iterator().next(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); tracker.updateFromClusterManager(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); @@ -1578,7 +1599,10 @@ public void testInSyncIdsAreRemovedIfNotValidatedByClusterManagerWithRemoteTrans if (randomBoolean()) { allocations.putAll(initializingToBeRemoved); } - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); tracker.updateFromClusterManager(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId)); tracker.activatePrimaryMode(NO_OPS_PERFORMED); @@ -1624,7 +1648,10 @@ public void testUpdateAllocationIdsFromClusterManagerWithRemoteTranslogEnabled() final Set initializingIds = activeAndInitializingAllocationIds.v2(); AllocationId primaryId = activeAllocationIds.iterator().next(); IndexShardRoutingTable routingTable = routingTable(initializingIds, primaryId); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(primaryId, settings); tracker.updateFromClusterManager(initialClusterStateVersion, ids(activeAllocationIds), routingTable); tracker.activatePrimaryMode(NO_OPS_PERFORMED); @@ -1936,7 +1963,10 @@ public void testSegmentReplicationCheckpointTrackingInvalidAllocationIDs() { } public void 
testPrimaryContextHandoffWithRemoteTranslogEnabled() throws IOException { - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings); final ShardId shardId = new ShardId("test", "_na_", 0); @@ -2115,7 +2145,10 @@ public void testPrimaryContextHandoffWithRemoteTranslogEnabled() throws IOExcept public void testIllegalStateExceptionIfUnknownAllocationIdWithRemoteTranslogEnabled() { final AllocationId active = AllocationId.newInitializing(); final AllocationId initializing = AllocationId.newInitializing(); - Settings settings = Settings.builder().put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true").build(); + Settings settings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, "true") + .build(); final ReplicationTracker tracker = newTracker(active, settings); tracker.updateFromClusterManager( randomNonNegativeLong(), diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index 1340ff1868a11..96fa53fbf0fc2 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -1260,6 +1260,7 @@ public void testGetChangesSnapshotThrowsAssertForRemoteStore() throws IOExceptio .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 2) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) .build(); final IndexMetadata.Builder indexMetadata = IndexMetadata.builder(shardRouting.getIndexName()).settings(settings).primaryTerm(0, 1); @@ -4839,12 +4840,13 @@ public void testTranslogFactoryForRemoteTranslogBackedReplicaShard() throws IOEx .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "seg-test") .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, "txlog-test") .build(); final IndexShard replicaShard = newStartedShard(false, primarySettings, new NRTReplicationEngineFactory()); - assertEquals(replicaShard.getEngine().getClass(), InternalEngine.class); + assertEquals(replicaShard.getEngine().getClass(), NRTReplicationEngine.class); assertEquals(replicaShard.getEngine().config().getTranslogFactory().getClass(), InternalTranslogFactory.class); closeShards(replicaShard); } diff --git a/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java b/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java index 349f78c3c2d7b..707a58535f21d 100644 --- a/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/RemoteFSTranslogTests.java @@ -47,6 +47,7 @@ import org.opensearch.core.index.shard.ShardId; import 
org.opensearch.index.translog.transfer.BlobStoreTransferService; import org.opensearch.indices.recovery.RecoverySettings; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.repositories.blobstore.BlobStoreRepository; import org.opensearch.repositories.blobstore.BlobStoreTestUtil; import org.opensearch.repositories.fs.FsRepository; @@ -182,6 +183,7 @@ private TranslogConfig getTranslogConfig(final Path path) { // only randomize between nog age retention and a long one, so failures will have a chance of reproducing .put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomBoolean() ? "-1ms" : "1h") .put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), randomIntBetween(-1, 2048) + "b") + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) .build(); return getTranslogConfig(path, settings); diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryHelperTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryHelperTests.java new file mode 100644 index 0000000000000..0dbc0372458b5 --- /dev/null +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryHelperTests.java @@ -0,0 +1,140 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.repositories.blobstore; + +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.blobstore.BlobContainer; +import org.opensearch.common.blobstore.BlobPath; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.Environment; +import org.opensearch.index.IndexModule; +import org.opensearch.index.IndexService; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.store.RemoteBufferedOutputDirectory; +import org.opensearch.indices.IndicesService; +import org.opensearch.indices.recovery.RecoverySettings; +import org.opensearch.indices.replication.common.ReplicationType; +import org.opensearch.plugins.Plugin; +import org.opensearch.plugins.RepositoryPlugin; +import org.opensearch.repositories.RepositoriesService; +import org.opensearch.repositories.Repository; +import org.opensearch.repositories.fs.FsRepository; +import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.OpenSearchSingleNodeTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class BlobStoreRepositoryHelperTests extends OpenSearchSingleNodeTestCase { + + static final String REPO_TYPE = "fsLike"; + + protected Collection> getPlugins() { + return Arrays.asList(FsLikeRepoPlugin.class); + } + + protected String[] getLockFilesInRemoteStore(String remoteStoreIndex, String remoteStoreRepository) throws IOException { + String indexUUID = client().admin() + .indices() + .prepareGetSettings(remoteStoreIndex) + .get() + .getSetting(remoteStoreIndex, IndexMetadata.SETTING_INDEX_UUID); + final RepositoriesService repositoriesService = 
getInstanceFromNode(RepositoriesService.class); + final BlobStoreRepository remoteStorerepository = (BlobStoreRepository) repositoriesService.repository(remoteStoreRepository); + BlobPath shardLevelBlobPath = remoteStorerepository.basePath().add(indexUUID).add("0").add("segments").add("lock_files"); + BlobContainer blobContainer = remoteStorerepository.blobStore().blobContainer(shardLevelBlobPath); + try (RemoteBufferedOutputDirectory lockDirectory = new RemoteBufferedOutputDirectory(blobContainer)) { + return Arrays.stream(lockDirectory.listAll()).filter(lock -> lock.endsWith(".lock")).toArray(String[]::new); + } + } + + // the reason for this plug-in is to drop any assertSnapshotOrGenericThread as mostly all access in this test goes from test threads + public static class FsLikeRepoPlugin extends Plugin implements RepositoryPlugin { + + @Override + public Map getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + RecoverySettings recoverySettings + ) { + return Collections.singletonMap( + REPO_TYPE, + (metadata) -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, recoverySettings) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we access blobStore on test/main threads + } + } + ); + } + } + + protected void createRepository(Client client, String repoName) { + AcknowledgedResponse putRepositoryResponse = client.admin() + .cluster() + .preparePutRepository(repoName) + .setType(REPO_TYPE) + .setSettings( + Settings.builder().put(node().settings()).put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) + ) + .get(); + assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + } + + protected void createRepository(Client client, String repoName, Settings repoSettings) { + AcknowledgedResponse putRepositoryResponse = client.admin() + .cluster() + .preparePutRepository(repoName) + .setType(REPO_TYPE) + .setSettings(repoSettings) + .get(); + assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + } + + protected void updateRepository(Client client, String repoName, Settings repoSettings) { + createRepository(client, repoName, repoSettings); + } + + protected Settings getRemoteStoreBackedIndexSettings(String remoteStoreRepo) { + return Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, "1") + .put("index.refresh_interval", "300s") + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "1") + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()) + .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .build(); + } + + protected void indexDocuments(Client client, String indexName) { + int numDocs = randomIntBetween(10, 20); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(i); + client.prepareIndex(indexName).setId(id).setSource("text", "sometext").get(); + } + client.admin().indices().prepareFlush(indexName).get(); + } + + protected IndexSettings getIndexSettings(String indexName) { + final IndicesService indicesService = getInstanceFromNode(IndicesService.class); + final IndexService indexService = indicesService.indexService(resolveIndex(indexName)); + return indexService.getIndexSettings(); + } + +} diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java 
b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java new file mode 100644 index 0000000000000..f25498b8c8368 --- /dev/null +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java @@ -0,0 +1,371 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.repositories.blobstore; + +import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; +import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.RepositoryMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.snapshots.blobstore.RemoteStoreShardShallowCopySnapshot; +import org.opensearch.indices.replication.common.ReplicationType; +import org.opensearch.repositories.IndexId; +import org.opensearch.repositories.RepositoriesService; +import org.opensearch.repositories.RepositoryData; +import org.opensearch.snapshots.SnapshotId; +import org.opensearch.test.FeatureFlagSetter; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; +import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_ENABLED_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING; + +/** + * Tests for the {@link BlobStoreRepository} and its subclasses. 
+ */ +public class BlobStoreRepositoryRemoteIndexTests extends BlobStoreRepositoryHelperTests { + + @Override + protected Settings nodeSettings() { + return Settings.builder() + .put(super.nodeSettings()) + .put(FeatureFlags.REMOTE_STORE, "true") + .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) + .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) + .put(CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey(), "test-rs-repo") + .put(CLUSTER_REMOTE_TRANSLOG_REPOSITORY_SETTING.getKey(), "test-rs-repo") + .build(); + } + + // Validate Scenario Normal Snapshot -> remoteStoreShallowCopy Snapshot -> normal Snapshot + public void testRetrieveShallowCopySnapshotCase1() throws IOException { + FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); + final Client client = client(); + final String snapshotRepositoryName = "test-repo"; + final String remoteStoreRepositoryName = "test-rs-repo"; + + logger.info("--> creating snapshot repository"); + + Settings snapshotRepoSettings = Settings.builder() + .put(node().settings()) + .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) + .build(); + createRepository(client, snapshotRepositoryName, snapshotRepoSettings); + + logger.info("--> creating remote store repository"); + Settings remoteStoreRepoSettings = Settings.builder() + .put(node().settings()) + .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) + .build(); + createRepository(client, remoteStoreRepositoryName, remoteStoreRepoSettings); + + logger.info("--> creating an index and indexing documents"); + final String indexName = "test-idx"; + createIndex(indexName); + ensureGreen(); + indexDocuments(client, indexName); + + logger.info("--> creating a remote store enabled index and indexing documents"); + final String remoteStoreIndexName = "test-rs-idx"; + Settings indexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepositoryName); + createIndex(remoteStoreIndexName, indexSettings); + indexDocuments(client, remoteStoreIndexName); + + logger.info("--> create first snapshot"); + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-1") + .setWaitForCompletion(true) + .setIndices(indexName, remoteStoreIndexName) + .get(); + final SnapshotId snapshotId1 = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + String[] lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 0) : "there should be no lock files present in directory, but found " + Arrays.toString(lockFiles); + logger.info("--> create remote index shallow snapshot"); + Settings snapshotRepoSettingsForShallowCopy = Settings.builder() + .put(snapshotRepoSettings) + .put(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), Boolean.TRUE) + .build(); + updateRepository(client, snapshotRepositoryName, snapshotRepoSettingsForShallowCopy); + + createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-2") + .setWaitForCompletion(true) + .setIndices(indexName, remoteStoreIndexName) + .get(); + final SnapshotId snapshotId2 = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 1) : "there should be only one lock file, but found " + Arrays.toString(lockFiles); + assert lockFiles[0].endsWith(snapshotId2.getUUID() + ".lock"); + + 
logger.info("--> create another normal snapshot"); + updateRepository(client, snapshotRepositoryName, snapshotRepoSettings); + createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-3") + .setWaitForCompletion(true) + .setIndices(indexName, remoteStoreIndexName) + .get(); + final SnapshotId snapshotId3 = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 1) : "there should be only one lock file, but found " + Arrays.toString(lockFiles); + assert lockFiles[0].endsWith(snapshotId2.getUUID() + ".lock"); + + logger.info("--> make sure the node's repository can resolve the snapshots"); + final List originalSnapshots = Arrays.asList(snapshotId1, snapshotId2, snapshotId3); + + final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(snapshotRepositoryName); + RepositoryData repositoryData = OpenSearchBlobStoreRepositoryIntegTestCase.getRepositoryData(repository); + IndexId indexId = repositoryData.resolveIndexId(remoteStoreIndexName); + + List snapshotIds = repositoryData.getSnapshotIds() + .stream() + .sorted((s1, s2) -> s1.getName().compareTo(s2.getName())) + .collect(Collectors.toList()); + assertThat(snapshotIds, equalTo(originalSnapshots)); + + // shallow copy shard metadata - getRemoteStoreShallowCopyShardMetadata + RemoteStoreShardShallowCopySnapshot shardShallowCopySnapshot = repository.getRemoteStoreShallowCopyShardMetadata( + snapshotId2, + indexId, + new ShardId(remoteStoreIndexName, indexId.getId(), 0) + ); + assertEquals(shardShallowCopySnapshot.getRemoteStoreRepository(), remoteStoreRepositoryName); + } + + public void testGetRemoteStoreShallowCopyShardMetadata() throws IOException { + FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); + final Client client = client(); + final String snapshotRepositoryName = "test-repo"; + final String remoteStoreRepositoryName = "test-rs-repo"; + + logger.info("--> creating snapshot repository"); + + Settings snapshotRepoSettings = Settings.builder() + .put(node().settings()) + .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) + .build(); + createRepository(client, snapshotRepositoryName, snapshotRepoSettings); + + logger.info("--> creating remote store repository"); + Settings remoteStoreRepoSettings = Settings.builder() + .put(node().settings()) + .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) + .build(); + createRepository(client, remoteStoreRepositoryName, remoteStoreRepoSettings); + + logger.info("--> creating a remote store enabled index and indexing documents"); + final String remoteStoreIndexName = "test-rs-idx"; + Settings indexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepositoryName); + createIndex(remoteStoreIndexName, indexSettings); + indexDocuments(client, remoteStoreIndexName); + + logger.info("--> create remote index shallow snapshot"); + Settings snapshotRepoSettingsForShallowCopy = Settings.builder() + .put(snapshotRepoSettings) + .put(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), Boolean.TRUE) + .build(); + updateRepository(client, snapshotRepositoryName, snapshotRepoSettingsForShallowCopy); + + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-2") + 
.setWaitForCompletion(true) + .setIndices(remoteStoreIndexName) + .get(); + final SnapshotId snapshotId = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + String[] lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 1) : "there should be only one lock file, but found " + Arrays.toString(lockFiles); + assert lockFiles[0].endsWith(snapshotId.getUUID() + ".lock"); + + final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(snapshotRepositoryName); + RepositoryData repositoryData = OpenSearchBlobStoreRepositoryIntegTestCase.getRepositoryData(repository); + IndexSettings indexSetting = getIndexSettings(remoteStoreIndexName); + IndexId indexId = repositoryData.resolveIndexId(remoteStoreIndexName); + RemoteStoreShardShallowCopySnapshot shardShallowCopySnapshot = repository.getRemoteStoreShallowCopyShardMetadata( + snapshotId, + indexId, + new ShardId(remoteStoreIndexName, indexSetting.getUUID(), 0) + ); + assertEquals(shardShallowCopySnapshot.getRemoteStoreRepository(), remoteStoreRepositoryName); + assertEquals(shardShallowCopySnapshot.getIndexUUID(), indexSetting.getUUID()); + assertEquals(shardShallowCopySnapshot.getRepositoryBasePath(), ""); + } + + // Validate Scenario remoteStoreShallowCopy Snapshot -> remoteStoreShallowCopy Snapshot + // -> remoteStoreShallowCopy Snapshot -> normal snapshot + public void testRetrieveShallowCopySnapshotCase2() throws IOException { + FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); + final Client client = client(); + final String snapshotRepositoryName = "test-repo"; + final String remoteStoreRepositoryName = "test-rs-repo"; + + logger.info("--> creating snapshot repository"); + Settings snapshotRepoSettings = Settings.builder() + .put(node().settings()) + .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) + .build(); + createRepository(client, snapshotRepositoryName, snapshotRepoSettings); + + GetRepositoriesResponse updatedGetRepositoriesResponse = client.admin() + .cluster() + .prepareGetRepositories(snapshotRepositoryName) + .get(); + + RepositoryMetadata updatedRepositoryMetadata = updatedGetRepositoriesResponse.repositories().get(0); + + assertFalse(updatedRepositoryMetadata.settings().getAsBoolean(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), false)); + + logger.info("--> creating remote store repository"); + createRepository(client, remoteStoreRepositoryName); + + logger.info("--> creating an index and indexing documents"); + final String indexName = "test-idx"; + createIndex(indexName); + ensureGreen(); + indexDocuments(client, indexName); + + logger.info("--> creating a remote store enabled index and indexing documents"); + final String remoteStoreIndexName = "test-rs-idx"; + Settings indexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepositoryName); + createIndex(remoteStoreIndexName, indexSettings); + indexDocuments(client, remoteStoreIndexName); + + logger.info("--> create first remote index shallow snapshot"); + + Settings snapshotRepoSettingsForShallowCopy = Settings.builder() + .put(snapshotRepoSettings) + .put(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), true) + .build(); + updateRepository(client, snapshotRepositoryName, snapshotRepoSettingsForShallowCopy); + + updatedGetRepositoriesResponse = 
client.admin().cluster().prepareGetRepositories(snapshotRepositoryName).get(); + + updatedRepositoryMetadata = updatedGetRepositoriesResponse.repositories().get(0); + + assertTrue(updatedRepositoryMetadata.settings().getAsBoolean(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), false)); + + CreateSnapshotResponse createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-1") + .setWaitForCompletion(true) + .setIndices(indexName, remoteStoreIndexName) + .get(); + final SnapshotId snapshotId1 = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + String[] lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 1) : "lock files are " + Arrays.toString(lockFiles); + assert lockFiles[0].endsWith(snapshotId1.getUUID() + ".lock"); + + logger.info("--> create second remote index shallow snapshot"); + createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-2") + .setWaitForCompletion(true) + .setIndices(indexName, remoteStoreIndexName) + .get(); + final SnapshotId snapshotId2 = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 2) : "lock files are " + Arrays.toString(lockFiles); + List shallowCopySnapshotIDs = Arrays.asList(snapshotId1, snapshotId2); + for (SnapshotId snapshotId : shallowCopySnapshotIDs) { + assert lockFiles[0].contains(snapshotId.getUUID()) || lockFiles[1].contains(snapshotId.getUUID()); + } + logger.info("--> create third remote index shallow snapshot"); + createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-3") + .setWaitForCompletion(true) + .setIndices(indexName, remoteStoreIndexName) + .get(); + final SnapshotId snapshotId3 = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 3); + shallowCopySnapshotIDs = Arrays.asList(snapshotId1, snapshotId2, snapshotId3); + for (SnapshotId snapshotId : shallowCopySnapshotIDs) { + assert lockFiles[0].contains(snapshotId.getUUID()) + || lockFiles[1].contains(snapshotId.getUUID()) + || lockFiles[2].contains(snapshotId.getUUID()); + } + logger.info("--> create normal snapshot"); + createRepository(client, snapshotRepositoryName, snapshotRepoSettings); + createSnapshotResponse = client.admin() + .cluster() + .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-4") + .setWaitForCompletion(true) + .setIndices(indexName, remoteStoreIndexName) + .get(); + final SnapshotId snapshotId4 = createSnapshotResponse.getSnapshotInfo().snapshotId(); + + lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); + assert (lockFiles.length == 3) : "lock files are " + Arrays.toString(lockFiles); + shallowCopySnapshotIDs = Arrays.asList(snapshotId1, snapshotId2, snapshotId3); + for (SnapshotId snapshotId : shallowCopySnapshotIDs) { + assert lockFiles[0].contains(snapshotId.getUUID()) + || lockFiles[1].contains(snapshotId.getUUID()) + || lockFiles[2].contains(snapshotId.getUUID()); + } + + logger.info("--> make sure the node's repository can resolve the snapshots"); + final List originalSnapshots = Arrays.asList(snapshotId1, snapshotId2, snapshotId3, snapshotId4); + + final RepositoriesService repositoriesService = 
getInstanceFromNode(RepositoriesService.class); + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(snapshotRepositoryName); + List snapshotIds = OpenSearchBlobStoreRepositoryIntegTestCase.getRepositoryData(repository) + .getSnapshotIds() + .stream() + .sorted((s1, s2) -> s1.getName().compareTo(s2.getName())) + .collect(Collectors.toList()); + assertThat(snapshotIds, equalTo(originalSnapshots)); + } + +} diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java index 893631ce4b564..f1253e377c819 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -33,31 +33,19 @@ package org.opensearch.repositories.blobstore; import org.opensearch.Version; -import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; -import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.UUIDs; -import org.opensearch.common.blobstore.BlobContainer; -import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.common.util.FeatureFlags; -import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; -import org.opensearch.index.IndexModule; -import org.opensearch.index.IndexService; -import org.opensearch.index.IndexSettings; -import org.opensearch.index.snapshots.blobstore.RemoteStoreShardShallowCopySnapshot; -import org.opensearch.index.store.RemoteBufferedOutputDirectory; -import org.opensearch.indices.IndicesService; import org.opensearch.indices.recovery.RecoverySettings; -import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.plugins.Plugin; import org.opensearch.plugins.RepositoryPlugin; import org.opensearch.repositories.IndexId; @@ -69,11 +57,8 @@ import org.opensearch.repositories.fs.FsRepository; import org.opensearch.snapshots.SnapshotId; import org.opensearch.snapshots.SnapshotState; -import org.opensearch.test.FeatureFlagSetter; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.OpenSearchSingleNodeTestCase; -import java.io.IOException; import java.nio.file.Path; import java.util.Arrays; import java.util.Collection; @@ -90,7 +75,7 @@ /** * Tests for the {@link BlobStoreRepository} and its subclasses. 
*/ -public class BlobStoreRepositoryTests extends OpenSearchSingleNodeTestCase { +public class BlobStoreRepositoryTests extends BlobStoreRepositoryHelperTests { static final String REPO_TYPE = "fsLike"; @@ -181,367 +166,8 @@ public void testRetrieveSnapshots() throws Exception { assertThat(snapshotIds, equalTo(originalSnapshots)); } - private void createRepository(Client client, String repoName) { - AcknowledgedResponse putRepositoryResponse = client.admin() - .cluster() - .preparePutRepository(repoName) - .setType(REPO_TYPE) - .setSettings( - Settings.builder().put(node().settings()).put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) - ) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - } - - private void createRepository(Client client, String repoName, Settings repoSettings) { - AcknowledgedResponse putRepositoryResponse = client.admin() - .cluster() - .preparePutRepository(repoName) - .setType(REPO_TYPE) - .setSettings(repoSettings) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - } - - private void updateRepository(Client client, String repoName, Settings repoSettings) { - createRepository(client, repoName, repoSettings); - } - - private Settings getRemoteStoreBackedIndexSettings(String remoteStoreRepo) { - return Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, "1") - .put("index.refresh_interval", "300s") - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "1") - .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()) - .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) - .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, remoteStoreRepo) - .build(); - } - - private void indexDocuments(Client client, String indexName) { - int numDocs = randomIntBetween(10, 20); - for (int i = 0; i < numDocs; i++) { - String id = Integer.toString(i); - client.prepareIndex(indexName).setId(id).setSource("text", "sometext").get(); - } - client.admin().indices().prepareFlush(indexName).get(); - } - - private String[] getLockFilesInRemoteStore(String remoteStoreIndex, String remoteStoreRepository) throws IOException { - String indexUUID = client().admin() - .indices() - .prepareGetSettings(remoteStoreIndex) - .get() - .getSetting(remoteStoreIndex, IndexMetadata.SETTING_INDEX_UUID); - final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - final BlobStoreRepository remoteStorerepository = (BlobStoreRepository) repositoriesService.repository(remoteStoreRepository); - BlobPath shardLevelBlobPath = remoteStorerepository.basePath().add(indexUUID).add("0").add("segments").add("lock_files"); - BlobContainer blobContainer = remoteStorerepository.blobStore().blobContainer(shardLevelBlobPath); - try (RemoteBufferedOutputDirectory lockDirectory = new RemoteBufferedOutputDirectory(blobContainer)) { - return Arrays.stream(lockDirectory.listAll()).filter(lock -> lock.endsWith(".lock")).toArray(String[]::new); - } - } - - // Validate Scenario Normal Snapshot -> remoteStoreShallowCopy Snapshot -> normal Snapshot - public void testRetrieveShallowCopySnapshotCase1() throws IOException { - FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - final Client client = client(); - final String snapshotRepositoryName = 
"test-repo"; - final String remoteStoreRepositoryName = "test-rs-repo"; - - logger.info("--> creating snapshot repository"); - - Settings snapshotRepoSettings = Settings.builder() - .put(node().settings()) - .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) - .build(); - createRepository(client, snapshotRepositoryName, snapshotRepoSettings); - - logger.info("--> creating remote store repository"); - Settings remoteStoreRepoSettings = Settings.builder() - .put(node().settings()) - .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) - .build(); - createRepository(client, remoteStoreRepositoryName, remoteStoreRepoSettings); - - logger.info("--> creating an index and indexing documents"); - final String indexName = "test-idx"; - createIndex(indexName); - ensureGreen(); - indexDocuments(client, indexName); - - logger.info("--> creating a remote store enabled index and indexing documents"); - final String remoteStoreIndexName = "test-rs-idx"; - Settings indexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepositoryName); - createIndex(remoteStoreIndexName, indexSettings); - indexDocuments(client, remoteStoreIndexName); - - logger.info("--> create first snapshot"); - CreateSnapshotResponse createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-1") - .setWaitForCompletion(true) - .setIndices(indexName, remoteStoreIndexName) - .get(); - final SnapshotId snapshotId1 = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - String[] lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 0) : "there should be no lock files present in directory, but found " + Arrays.toString(lockFiles); - logger.info("--> create remote index shallow snapshot"); - Settings snapshotRepoSettingsForShallowCopy = Settings.builder() - .put(snapshotRepoSettings) - .put(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), Boolean.TRUE) - .build(); - updateRepository(client, snapshotRepositoryName, snapshotRepoSettingsForShallowCopy); - - createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-2") - .setWaitForCompletion(true) - .setIndices(indexName, remoteStoreIndexName) - .get(); - final SnapshotId snapshotId2 = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 1) : "there should be only one lock file, but found " + Arrays.toString(lockFiles); - assert lockFiles[0].endsWith(snapshotId2.getUUID() + ".lock"); - - logger.info("--> create another normal snapshot"); - updateRepository(client, snapshotRepositoryName, snapshotRepoSettings); - createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-3") - .setWaitForCompletion(true) - .setIndices(indexName, remoteStoreIndexName) - .get(); - final SnapshotId snapshotId3 = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 1) : "there should be only one lock file, but found " + Arrays.toString(lockFiles); - assert lockFiles[0].endsWith(snapshotId2.getUUID() + ".lock"); - - logger.info("--> make sure the node's repository can resolve the snapshots"); - final List originalSnapshots = Arrays.asList(snapshotId1, snapshotId2, snapshotId3); - 
- final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(snapshotRepositoryName); - RepositoryData repositoryData = OpenSearchBlobStoreRepositoryIntegTestCase.getRepositoryData(repository); - IndexId indexId = repositoryData.resolveIndexId(remoteStoreIndexName); - - List snapshotIds = repositoryData.getSnapshotIds() - .stream() - .sorted((s1, s2) -> s1.getName().compareTo(s2.getName())) - .collect(Collectors.toList()); - assertThat(snapshotIds, equalTo(originalSnapshots)); - - // shallow copy shard metadata - getRemoteStoreShallowCopyShardMetadata - RemoteStoreShardShallowCopySnapshot shardShallowCopySnapshot = repository.getRemoteStoreShallowCopyShardMetadata( - snapshotId2, - indexId, - new ShardId(remoteStoreIndexName, indexId.getId(), 0) - ); - assertEquals(shardShallowCopySnapshot.getRemoteStoreRepository(), remoteStoreRepositoryName); - } - - public void testGetRemoteStoreShallowCopyShardMetadata() throws IOException { - FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - final Client client = client(); - final String snapshotRepositoryName = "test-repo"; - final String remoteStoreRepositoryName = "test-rs-repo"; - - logger.info("--> creating snapshot repository"); - - Settings snapshotRepoSettings = Settings.builder() - .put(node().settings()) - .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) - .build(); - createRepository(client, snapshotRepositoryName, snapshotRepoSettings); - - logger.info("--> creating remote store repository"); - Settings remoteStoreRepoSettings = Settings.builder() - .put(node().settings()) - .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) - .build(); - createRepository(client, remoteStoreRepositoryName, remoteStoreRepoSettings); - - logger.info("--> creating a remote store enabled index and indexing documents"); - final String remoteStoreIndexName = "test-rs-idx"; - Settings indexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepositoryName); - createIndex(remoteStoreIndexName, indexSettings); - indexDocuments(client, remoteStoreIndexName); - - logger.info("--> create remote index shallow snapshot"); - Settings snapshotRepoSettingsForShallowCopy = Settings.builder() - .put(snapshotRepoSettings) - .put(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), Boolean.TRUE) - .build(); - updateRepository(client, snapshotRepositoryName, snapshotRepoSettingsForShallowCopy); - - CreateSnapshotResponse createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-2") - .setWaitForCompletion(true) - .setIndices(remoteStoreIndexName) - .get(); - final SnapshotId snapshotId = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - String[] lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 1) : "there should be only one lock file, but found " + Arrays.toString(lockFiles); - assert lockFiles[0].endsWith(snapshotId.getUUID() + ".lock"); - - final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(snapshotRepositoryName); - RepositoryData repositoryData = OpenSearchBlobStoreRepositoryIntegTestCase.getRepositoryData(repository); - IndexSettings indexSetting = getIndexSettings(remoteStoreIndexName); - IndexId indexId = 
repositoryData.resolveIndexId(remoteStoreIndexName); - RemoteStoreShardShallowCopySnapshot shardShallowCopySnapshot = repository.getRemoteStoreShallowCopyShardMetadata( - snapshotId, - indexId, - new ShardId(remoteStoreIndexName, indexSetting.getUUID(), 0) - ); - assertEquals(shardShallowCopySnapshot.getRemoteStoreRepository(), remoteStoreRepositoryName); - assertEquals(shardShallowCopySnapshot.getIndexUUID(), indexSetting.getUUID()); - assertEquals(shardShallowCopySnapshot.getRepositoryBasePath(), ""); - } - - private IndexSettings getIndexSettings(String indexName) { - final IndicesService indicesService = getInstanceFromNode(IndicesService.class); - final IndexService indexService = indicesService.indexService(resolveIndex(indexName)); - return indexService.getIndexSettings(); - } - // Validate Scenario remoteStoreShallowCopy Snapshot -> remoteStoreShallowCopy Snapshot // -> remoteStoreShallowCopy Snapshot -> normal snapshot - public void testRetrieveShallowCopySnapshotCase2() throws IOException { - FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); - final Client client = client(); - final String snapshotRepositoryName = "test-repo"; - final String remoteStoreRepositoryName = "test-rs-repo"; - - logger.info("--> creating snapshot repository"); - Settings snapshotRepoSettings = Settings.builder() - .put(node().settings()) - .put("location", OpenSearchIntegTestCase.randomRepoPath(node().settings())) - .build(); - createRepository(client, snapshotRepositoryName, snapshotRepoSettings); - - GetRepositoriesResponse updatedGetRepositoriesResponse = client.admin() - .cluster() - .prepareGetRepositories(snapshotRepositoryName) - .get(); - - RepositoryMetadata updatedRepositoryMetadata = updatedGetRepositoriesResponse.repositories().get(0); - - assertFalse(updatedRepositoryMetadata.settings().getAsBoolean(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), false)); - - logger.info("--> creating remote store repository"); - createRepository(client, remoteStoreRepositoryName); - - logger.info("--> creating an index and indexing documents"); - final String indexName = "test-idx"; - createIndex(indexName); - ensureGreen(); - indexDocuments(client, indexName); - - logger.info("--> creating a remote store enabled index and indexing documents"); - final String remoteStoreIndexName = "test-rs-idx"; - Settings indexSettings = getRemoteStoreBackedIndexSettings(remoteStoreRepositoryName); - createIndex(remoteStoreIndexName, indexSettings); - indexDocuments(client, remoteStoreIndexName); - - logger.info("--> create first remote index shallow snapshot"); - - Settings snapshotRepoSettingsForShallowCopy = Settings.builder() - .put(snapshotRepoSettings) - .put(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), true) - .build(); - updateRepository(client, snapshotRepositoryName, snapshotRepoSettingsForShallowCopy); - - updatedGetRepositoriesResponse = client.admin().cluster().prepareGetRepositories(snapshotRepositoryName).get(); - - updatedRepositoryMetadata = updatedGetRepositoriesResponse.repositories().get(0); - - assertTrue(updatedRepositoryMetadata.settings().getAsBoolean(BlobStoreRepository.REMOTE_STORE_INDEX_SHALLOW_COPY.getKey(), false)); - - CreateSnapshotResponse createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-1") - .setWaitForCompletion(true) - .setIndices(indexName, remoteStoreIndexName) - .get(); - final SnapshotId snapshotId1 = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - String[] lockFiles = 
getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 1) : "lock files are " + Arrays.toString(lockFiles); - assert lockFiles[0].endsWith(snapshotId1.getUUID() + ".lock"); - - logger.info("--> create second remote index shallow snapshot"); - createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-2") - .setWaitForCompletion(true) - .setIndices(indexName, remoteStoreIndexName) - .get(); - final SnapshotId snapshotId2 = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 2) : "lock files are " + Arrays.toString(lockFiles); - List shallowCopySnapshotIDs = Arrays.asList(snapshotId1, snapshotId2); - for (SnapshotId snapshotId : shallowCopySnapshotIDs) { - assert lockFiles[0].contains(snapshotId.getUUID()) || lockFiles[1].contains(snapshotId.getUUID()); - } - logger.info("--> create third remote index shallow snapshot"); - createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-3") - .setWaitForCompletion(true) - .setIndices(indexName, remoteStoreIndexName) - .get(); - final SnapshotId snapshotId3 = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 3); - shallowCopySnapshotIDs = Arrays.asList(snapshotId1, snapshotId2, snapshotId3); - for (SnapshotId snapshotId : shallowCopySnapshotIDs) { - assert lockFiles[0].contains(snapshotId.getUUID()) - || lockFiles[1].contains(snapshotId.getUUID()) - || lockFiles[2].contains(snapshotId.getUUID()); - } - logger.info("--> create normal snapshot"); - createRepository(client, snapshotRepositoryName, snapshotRepoSettings); - createSnapshotResponse = client.admin() - .cluster() - .prepareCreateSnapshot(snapshotRepositoryName, "test-snap-4") - .setWaitForCompletion(true) - .setIndices(indexName, remoteStoreIndexName) - .get(); - final SnapshotId snapshotId4 = createSnapshotResponse.getSnapshotInfo().snapshotId(); - - lockFiles = getLockFilesInRemoteStore(remoteStoreIndexName, remoteStoreRepositoryName); - assert (lockFiles.length == 3) : "lock files are " + Arrays.toString(lockFiles); - shallowCopySnapshotIDs = Arrays.asList(snapshotId1, snapshotId2, snapshotId3); - for (SnapshotId snapshotId : shallowCopySnapshotIDs) { - assert lockFiles[0].contains(snapshotId.getUUID()) - || lockFiles[1].contains(snapshotId.getUUID()) - || lockFiles[2].contains(snapshotId.getUUID()); - } - - logger.info("--> make sure the node's repository can resolve the snapshots"); - final List originalSnapshots = Arrays.asList(snapshotId1, snapshotId2, snapshotId3, snapshotId4); - - final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(snapshotRepositoryName); - List snapshotIds = OpenSearchBlobStoreRepositoryIntegTestCase.getRepositoryData(repository) - .getSnapshotIds() - .stream() - .sorted((s1, s2) -> s1.getName().compareTo(s2.getName())) - .collect(Collectors.toList()); - assertThat(snapshotIds, equalTo(originalSnapshots)); - } - public void testReadAndWriteSnapshotsThroughIndexFile() throws Exception { final BlobStoreRepository repository = setupRepo(); final long pendingGeneration = repository.metadata.pendingGeneration(); diff --git 
a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java index a169fb871ca53..2c6f4f7b15e5d 100644 --- a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -523,7 +523,7 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce assertDocCount(index, numdocs); } - protected Settings getRemoteStoreBackedIndexSettings(String remoteStoreRepo) { + protected Settings getRemoteStoreBackedIndexSettings() { return Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, "1") .put("index.refresh_interval", "300s") @@ -531,9 +531,6 @@ protected Settings getRemoteStoreBackedIndexSettings(String remoteStoreRepo) { .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()) .put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false) .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStoreRepo) - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, remoteStoreRepo) .build(); } From f2ca769143d1c589806d5f9adc65e0545eedd753 Mon Sep 17 00:00:00 2001 From: Ticheng Lin <51488860+ticheng-aws@users.noreply.github.com> Date: Tue, 1 Aug 2023 07:26:37 -0700 Subject: [PATCH 36/75] Update version guards to deal for aggregation profiler with concurrent aggregation (#9017) Signed-off-by: Ticheng Lin --- .../java/org/opensearch/search/profile/ProfileResult.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/profile/ProfileResult.java b/server/src/main/java/org/opensearch/search/profile/ProfileResult.java index d96db1d2dd8da..62b247735dfdb 100644 --- a/server/src/main/java/org/opensearch/search/profile/ProfileResult.java +++ b/server/src/main/java/org/opensearch/search/profile/ProfileResult.java @@ -134,7 +134,7 @@ public ProfileResult(StreamInput in) throws IOException { breakdown = in.readMap(StreamInput::readString, StreamInput::readLong); debug = in.readMap(StreamInput::readString, StreamInput::readGenericValue); children = in.readList(ProfileResult::new); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { this.maxSliceNodeTime = in.readOptionalLong(); this.minSliceNodeTime = in.readOptionalLong(); this.avgSliceNodeTime = in.readOptionalLong(); @@ -153,7 +153,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(breakdown, StreamOutput::writeString, StreamOutput::writeLong); out.writeMap(debug, StreamOutput::writeString, StreamOutput::writeGenericValue); out.writeList(children); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { out.writeOptionalLong(maxSliceNodeTime); out.writeOptionalLong(minSliceNodeTime); out.writeOptionalLong(avgSliceNodeTime); From 0596bd4444dc2893353af95a8edbe373a5214ffe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 11:27:23 -0400 Subject: [PATCH 37/75] Bump com.maxmind.geoip2:geoip2 from 4.0.1 to 4.1.0 in /modules/ingest-geoip (#8998) * Bump com.maxmind.geoip2:geoip2 in /modules/ingest-geoip Bumps 
[com.maxmind.geoip2:geoip2](https://github.com/maxmind/GeoIP2-java) from 4.0.1 to 4.1.0. - [Release notes](https://github.com/maxmind/GeoIP2-java/releases) - [Changelog](https://github.com/maxmind/GeoIP2-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/maxmind/GeoIP2-java/compare/v4.0.1...v4.1.0) --- updated-dependencies: - dependency-name: com.maxmind.geoip2:geoip2 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Update changelog Signed-off-by: dependabot[bot] --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- CHANGELOG.md | 3 ++- modules/ingest-geoip/build.gradle | 2 +- modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1 | 1 - modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) delete mode 100644 modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1 create mode 100644 modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index cf101317bf81d..28080050b2e22 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -96,6 +96,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.gradle.enterprise` from 3.13.3 to 3.14.1 ([#8996](https://github.com/opensearch-project/OpenSearch/pull/8996)) - Bump `org.apache.commons:commons-lang3` from 3.12.0 to 3.13.0 ([#8995](https://github.com/opensearch-project/OpenSearch/pull/8995)) - Bump `com.google.cloud:google-cloud-core-http` from 2.21.0 to 2.21.1 ([#8999](https://github.com/opensearch-project/OpenSearch/pull/8999)) +- Bump `com.maxmind.geoip2:geoip2` from 4.0.1 to 4.1.0 ([#8998](https://github.com/opensearch-project/OpenSearch/pull/8998)) ### Changed - Perform aggregation postCollection in ContextIndexSearcher after searching leaves ([#8303](https://github.com/opensearch-project/OpenSearch/pull/8303)) @@ -117,4 +118,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Security [Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.x...HEAD -[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x +[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x \ No newline at end of file diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index 35b7de8f83164..e126cf37e33a2 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -39,7 +39,7 @@ opensearchplugin { } dependencies { - api('com.maxmind.geoip2:geoip2:4.0.1') + api('com.maxmind.geoip2:geoip2:4.1.0') // geoip2 dependencies: api('com.maxmind.db:maxmind-db:3.0.0') api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") diff --git a/modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1 deleted file mode 100644 index 0722ebf08e137..0000000000000 --- a/modules/ingest-geoip/licenses/geoip2-4.0.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f2a9b0ebd91b73a409a526b4d939f5ab8f4a1a87 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 new file mode 100644 index 0000000000000..0d124299e4cfb --- /dev/null +++ b/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 @@ -0,0 +1 @@ +b6b356cc91863409ba3475a148ee11a3a6d6aa4b \ No 
newline at end of file From 86ce02e2edbbe46797fc95f0cbc7bac338ba2a5a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 13:18:09 -0400 Subject: [PATCH 38/75] Bump org.apache.commons:commons-lang3 from 3.12.0 to 3.13.0 in /plugins/repository-hdfs (#8997) * Bump org.apache.commons:commons-lang3 in /plugins/repository-hdfs Bumps org.apache.commons:commons-lang3 from 3.12.0 to 3.13.0. --- updated-dependencies: - dependency-name: org.apache.commons:commons-lang3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Add CHANGELOG.md entry Signed-off-by: Andriy Redko --------- Signed-off-by: dependabot[bot] Signed-off-by: Andriy Redko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] Co-authored-by: Andriy Redko --- CHANGELOG.md | 3 ++- plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1 | 1 - plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) delete mode 100644 plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 28080050b2e22..ddedd5d59ffb2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -97,6 +97,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.apache.commons:commons-lang3` from 3.12.0 to 3.13.0 ([#8995](https://github.com/opensearch-project/OpenSearch/pull/8995)) - Bump `com.google.cloud:google-cloud-core-http` from 2.21.0 to 2.21.1 ([#8999](https://github.com/opensearch-project/OpenSearch/pull/8999)) - Bump `com.maxmind.geoip2:geoip2` from 4.0.1 to 4.1.0 ([#8998](https://github.com/opensearch-project/OpenSearch/pull/8998)) +- Bump `org.apache.commons:commons-lang3` from 3.12.0 to 3.13.0 in /plugins/repository-hdfs ([#8997](https://github.com/opensearch-project/OpenSearch/pull/8997)) ### Changed - Perform aggregation postCollection in ContextIndexSearcher after searching leaves ([#8303](https://github.com/opensearch-project/OpenSearch/pull/8303)) @@ -118,4 +119,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Security [Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.x...HEAD -[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x \ No newline at end of file +[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.10...2.x diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 6626bfccc6662..1fdb3d2fb41e2 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -76,7 +76,7 @@ dependencies { api 'org.apache.commons:commons-compress:1.23.0' api 'org.apache.commons:commons-configuration2:2.9.0' api 'commons-io:commons-io:2.13.0' - api 'org.apache.commons:commons-lang3:3.12.0' + api 'org.apache.commons:commons-lang3:3.13.0' implementation 'com.google.re2j:re2j:1.7' api 'javax.servlet:servlet-api:2.5' api "org.slf4j:slf4j-api:${versions.slf4j}" diff --git a/plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1 deleted file mode 100644 index 9273d8c01aaba..0000000000000 --- 
a/plugins/repository-hdfs/licenses/commons-lang3-3.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c6842c86792ff03b9f1d1fe2aab8dc23aa6c6f0e \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 new file mode 100644 index 0000000000000..d0c2f2486ee1f --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 @@ -0,0 +1 @@ +b7263237aa89c1f99b327197c41d0669707a462e \ No newline at end of file From 22893b12add9f12717f42aa9e718ee16ea442114 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 1 Aug 2023 12:38:06 -0500 Subject: [PATCH 39/75] [Refactor] Recyclers to Common Library (#9028) This commit refactors the common recylers logic from the server module to the core library to make the base functionality available to concrete implementations across the codebase. This is done to support cloud native or serverless implementations. Until JPM is enabled, the classes remain marked for internal use only. Determining which classes will be exported will dictate if/when these doc labels are switched to API. Signed-off-by: Nicholas Walter Knize --- .../opensearch/common/recycler/AbstractRecycler.java | 0 .../opensearch/common/recycler/AbstractRecyclerC.java | 0 .../common/recycler/ConcurrentDequeRecycler.java | 0 .../org/opensearch/common/recycler/DequeRecycler.java | 0 .../org/opensearch/common/recycler/FilterRecycler.java | 0 .../org/opensearch/common/recycler/NoneRecycler.java | 0 .../java/org/opensearch/common/recycler/Recycler.java | 0 .../java/org/opensearch/common/recycler/Recyclers.java | 4 +++- .../org/opensearch/common/recycler/package-info.java | 10 ++++++++++ .../common/util/concurrent/ConcurrentCollections.java | 0 .../common/util/concurrent/ConcurrentHashMapLong.java | 0 .../common/util/concurrent/ConcurrentMapLong.java | 0 .../common/recycler/AbstractRecyclerTestCase.java | 0 .../common/recycler/ConcurrentRecyclerTests.java | 0 .../common/recycler/LockedRecyclerTests.java | 0 .../opensearch/common/recycler/NoneRecyclerTests.java | 0 .../opensearch/common/recycler/QueueRecyclerTests.java | 0 17 files changed, 13 insertions(+), 1 deletion(-) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/AbstractRecycler.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/AbstractRecyclerC.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/ConcurrentDequeRecycler.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/DequeRecycler.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/FilterRecycler.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/NoneRecycler.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/Recycler.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/recycler/Recyclers.java (98%) create mode 100644 libs/common/src/main/java/org/opensearch/common/recycler/package-info.java rename {server => libs/common}/src/main/java/org/opensearch/common/util/concurrent/ConcurrentCollections.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/util/concurrent/ConcurrentHashMapLong.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/util/concurrent/ConcurrentMapLong.java (100%) rename {server => 
libs/common}/src/test/java/org/opensearch/common/recycler/AbstractRecyclerTestCase.java (100%) rename {server => libs/common}/src/test/java/org/opensearch/common/recycler/ConcurrentRecyclerTests.java (100%) rename {server => libs/common}/src/test/java/org/opensearch/common/recycler/LockedRecyclerTests.java (100%) rename {server => libs/common}/src/test/java/org/opensearch/common/recycler/NoneRecyclerTests.java (100%) rename {server => libs/common}/src/test/java/org/opensearch/common/recycler/QueueRecyclerTests.java (100%) diff --git a/server/src/main/java/org/opensearch/common/recycler/AbstractRecycler.java b/libs/common/src/main/java/org/opensearch/common/recycler/AbstractRecycler.java similarity index 100% rename from server/src/main/java/org/opensearch/common/recycler/AbstractRecycler.java rename to libs/common/src/main/java/org/opensearch/common/recycler/AbstractRecycler.java diff --git a/server/src/main/java/org/opensearch/common/recycler/AbstractRecyclerC.java b/libs/common/src/main/java/org/opensearch/common/recycler/AbstractRecyclerC.java similarity index 100% rename from server/src/main/java/org/opensearch/common/recycler/AbstractRecyclerC.java rename to libs/common/src/main/java/org/opensearch/common/recycler/AbstractRecyclerC.java diff --git a/server/src/main/java/org/opensearch/common/recycler/ConcurrentDequeRecycler.java b/libs/common/src/main/java/org/opensearch/common/recycler/ConcurrentDequeRecycler.java similarity index 100% rename from server/src/main/java/org/opensearch/common/recycler/ConcurrentDequeRecycler.java rename to libs/common/src/main/java/org/opensearch/common/recycler/ConcurrentDequeRecycler.java diff --git a/server/src/main/java/org/opensearch/common/recycler/DequeRecycler.java b/libs/common/src/main/java/org/opensearch/common/recycler/DequeRecycler.java similarity index 100% rename from server/src/main/java/org/opensearch/common/recycler/DequeRecycler.java rename to libs/common/src/main/java/org/opensearch/common/recycler/DequeRecycler.java diff --git a/server/src/main/java/org/opensearch/common/recycler/FilterRecycler.java b/libs/common/src/main/java/org/opensearch/common/recycler/FilterRecycler.java similarity index 100% rename from server/src/main/java/org/opensearch/common/recycler/FilterRecycler.java rename to libs/common/src/main/java/org/opensearch/common/recycler/FilterRecycler.java diff --git a/server/src/main/java/org/opensearch/common/recycler/NoneRecycler.java b/libs/common/src/main/java/org/opensearch/common/recycler/NoneRecycler.java similarity index 100% rename from server/src/main/java/org/opensearch/common/recycler/NoneRecycler.java rename to libs/common/src/main/java/org/opensearch/common/recycler/NoneRecycler.java diff --git a/server/src/main/java/org/opensearch/common/recycler/Recycler.java b/libs/common/src/main/java/org/opensearch/common/recycler/Recycler.java similarity index 100% rename from server/src/main/java/org/opensearch/common/recycler/Recycler.java rename to libs/common/src/main/java/org/opensearch/common/recycler/Recycler.java diff --git a/server/src/main/java/org/opensearch/common/recycler/Recyclers.java b/libs/common/src/main/java/org/opensearch/common/recycler/Recyclers.java similarity index 98% rename from server/src/main/java/org/opensearch/common/recycler/Recyclers.java rename to libs/common/src/main/java/org/opensearch/common/recycler/Recyclers.java index 4cbb80509d6a1..52587144369f1 100644 --- a/server/src/main/java/org/opensearch/common/recycler/Recyclers.java +++ 
b/libs/common/src/main/java/org/opensearch/common/recycler/Recyclers.java @@ -75,6 +75,8 @@ public static Recycler.Factory dequeFactory(final Recycler.C c, final /** * Wrap the provided recycler so that calls to {@link Recycler#obtain()} and {@link Recycler.V#close()} are protected by * a lock. + * + * @opensearch.internal */ public static Recycler locked(final Recycler recycler) { return new FilterRecycler() { @@ -140,7 +142,7 @@ public static Recycler concurrent(final Recycler.Factory factory, fina private final Recycler[] recyclers; { - @SuppressWarnings("unchecked") + @SuppressWarnings({ "rawtypes", "unchecked" }) final Recycler[] recyclers = new Recycler[concurrencyLevel]; this.recyclers = recyclers; for (int i = 0; i < concurrencyLevel; ++i) { diff --git a/libs/common/src/main/java/org/opensearch/common/recycler/package-info.java b/libs/common/src/main/java/org/opensearch/common/recycler/package-info.java new file mode 100644 index 0000000000000..fec3c5d5e52d3 --- /dev/null +++ b/libs/common/src/main/java/org/opensearch/common/recycler/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Common Recycler functionality for recycling objects */ +package org.opensearch.common.recycler; diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ConcurrentCollections.java b/libs/common/src/main/java/org/opensearch/common/util/concurrent/ConcurrentCollections.java similarity index 100% rename from server/src/main/java/org/opensearch/common/util/concurrent/ConcurrentCollections.java rename to libs/common/src/main/java/org/opensearch/common/util/concurrent/ConcurrentCollections.java diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ConcurrentHashMapLong.java b/libs/common/src/main/java/org/opensearch/common/util/concurrent/ConcurrentHashMapLong.java similarity index 100% rename from server/src/main/java/org/opensearch/common/util/concurrent/ConcurrentHashMapLong.java rename to libs/common/src/main/java/org/opensearch/common/util/concurrent/ConcurrentHashMapLong.java diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ConcurrentMapLong.java b/libs/common/src/main/java/org/opensearch/common/util/concurrent/ConcurrentMapLong.java similarity index 100% rename from server/src/main/java/org/opensearch/common/util/concurrent/ConcurrentMapLong.java rename to libs/common/src/main/java/org/opensearch/common/util/concurrent/ConcurrentMapLong.java diff --git a/server/src/test/java/org/opensearch/common/recycler/AbstractRecyclerTestCase.java b/libs/common/src/test/java/org/opensearch/common/recycler/AbstractRecyclerTestCase.java similarity index 100% rename from server/src/test/java/org/opensearch/common/recycler/AbstractRecyclerTestCase.java rename to libs/common/src/test/java/org/opensearch/common/recycler/AbstractRecyclerTestCase.java diff --git a/server/src/test/java/org/opensearch/common/recycler/ConcurrentRecyclerTests.java b/libs/common/src/test/java/org/opensearch/common/recycler/ConcurrentRecyclerTests.java similarity index 100% rename from server/src/test/java/org/opensearch/common/recycler/ConcurrentRecyclerTests.java rename to libs/common/src/test/java/org/opensearch/common/recycler/ConcurrentRecyclerTests.java diff --git a/server/src/test/java/org/opensearch/common/recycler/LockedRecyclerTests.java 
b/libs/common/src/test/java/org/opensearch/common/recycler/LockedRecyclerTests.java similarity index 100% rename from server/src/test/java/org/opensearch/common/recycler/LockedRecyclerTests.java rename to libs/common/src/test/java/org/opensearch/common/recycler/LockedRecyclerTests.java diff --git a/server/src/test/java/org/opensearch/common/recycler/NoneRecyclerTests.java b/libs/common/src/test/java/org/opensearch/common/recycler/NoneRecyclerTests.java similarity index 100% rename from server/src/test/java/org/opensearch/common/recycler/NoneRecyclerTests.java rename to libs/common/src/test/java/org/opensearch/common/recycler/NoneRecyclerTests.java diff --git a/server/src/test/java/org/opensearch/common/recycler/QueueRecyclerTests.java b/libs/common/src/test/java/org/opensearch/common/recycler/QueueRecyclerTests.java similarity index 100% rename from server/src/test/java/org/opensearch/common/recycler/QueueRecyclerTests.java rename to libs/common/src/test/java/org/opensearch/common/recycler/QueueRecyclerTests.java From b17396a17579a5023f8b3bf1ac2c2d8a48838479 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Tue, 1 Aug 2023 14:40:35 -0400 Subject: [PATCH 40/75] Bump netty from 4.1.94.Final to 4.1.96.Final (#9030) Signed-off-by: Andriy Redko --- CHANGELOG.md | 1 + buildSrc/version.properties | 2 +- .../licenses/netty-buffer-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-buffer-4.1.96.Final.jar.sha1 | 1 + .../transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1 | 1 - .../transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-common-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-common-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-handler-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-handler-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.96.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.94.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-codec-dns-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-dns-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-codec-socks-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-socks-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-handler-proxy-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-handler-proxy-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-resolver-dns-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-resolver-dns-4.1.96.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.94.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.96.Final.jar.sha1 | 1 + .../repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1 | 1 - .../repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1 | 1 + 
.../licenses/netty-codec-http-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.96.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.96.Final.jar.sha1 | 1 + .../netty-transport-classes-epoll-4.1.94.Final.jar.sha1 | 1 - .../netty-transport-classes-epoll-4.1.96.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.94.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.96.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.96.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.94.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.96.Final.jar.sha1 | 1 + 68 files changed, 35 insertions(+), 34 deletions(-) delete mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-common-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-common-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.94.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.96.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 create mode 100644 
modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.96.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1 
delete mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.94.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.96.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.94.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.96.Final.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index ddedd5d59ffb2..a37976462b38e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -98,6 +98,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.google.cloud:google-cloud-core-http` from 2.21.0 to 2.21.1 ([#8999](https://github.com/opensearch-project/OpenSearch/pull/8999)) - Bump `com.maxmind.geoip2:geoip2` from 4.0.1 to 4.1.0 ([#8998](https://github.com/opensearch-project/OpenSearch/pull/8998)) - Bump `org.apache.commons:commons-lang3` from 3.12.0 to 3.13.0 in /plugins/repository-hdfs ([#8997](https://github.com/opensearch-project/OpenSearch/pull/8997)) +- Bump `netty` from 4.1.94.Final to 4.1.96.Final ([#9030](https://github.com/opensearch-project/OpenSearch/pull/9030)) ### Changed - Perform aggregation postCollection in ContextIndexSearcher after searching leaves ([#8303](https://github.com/opensearch-project/OpenSearch/pull/8303)) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index e4a9293c59b8f..53fcd63944d6a 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -28,7 +28,7 @@ jakarta_annotation = 1.3.5 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 5.5.0 -netty = 4.1.94.Final +netty = 4.1.96.Final joda = 2.12.2 # client dependencies diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.94.Final.jar.sha1 deleted file mode 100644 index 05b1c2a4d614e..0000000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eec248b26f16e888688e5bb37b7eeda76b78d2f7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..7abdb33dc79a2 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +4b80fffbe77485b457bf844289bf1801f61b9e91 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1 deleted file mode 100644 index baa7e25f1ac49..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c70ef20ca338558147887df60f46341bc47f6900 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1 new file mode 100644 index 
0000000000000..8fdb32be1de0b --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +9cfe430f8b14e7ba86969d8e1126aa0aae4d18f0 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.94.Final.jar.sha1 deleted file mode 100644 index 8c018be2565e5..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9e5404764092c1f6305ad5719078f46ab228d587 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..dfb0cf39463e2 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +a4d0d95df5026965c454902ef3d6d84b81f89626 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 deleted file mode 100644 index e73026b412972..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f651595784d6cca4cbca6a8ad74c48fceed6cea8 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..2fc787ee65197 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +cc8baf4ff67c1bcc0cde60bc5c2bb9447d92d9e6 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.94.Final.jar.sha1 deleted file mode 100644 index b787338551ede..0000000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad4ecf779ebc794cd351f57792f56ea01387b868 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..85b5f52749671 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +d10c167623cbc471753f950846df241d1021655c \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.94.Final.jar.sha1 deleted file mode 100644 index b08e85ba7adf8..0000000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cd9121ce24d6d3f2898946d04b0ef3ec548b00b4 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..fe4f48c68e78b --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +7840d7523d709e02961b647546f9d9dde1699306 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.94.Final.jar.sha1 deleted file mode 100644 index 4c9e4dda2b852..0000000000000 --- 
a/modules/transport-netty4/licenses/netty-resolver-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e96f649e8e9dcb29a1f8e95328b99c9eb6cf76c2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..9e93f013226cd --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +0e51db5568a881e0f9b013b35617c597dc32f130 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.94.Final.jar.sha1 deleted file mode 100644 index ed7760b8e15d1..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec783a737f96991a87b1d5794e2f9eb2024d708a \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..707285d3d29c3 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +dbd15ca244be28e1a98ed29b9d755edbfa737e02 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 deleted file mode 100644 index 43bc960a347a1..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3fa5f9d04b6b782d869d6e0657d896eeadca5866 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..e911c47d5ab1a --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +daf8578cade63a01525ee9d70371fa78e6e91094 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.94.Final.jar.sha1 deleted file mode 100644 index 670bd4c98a044..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9180660dc8479e1594b60b02fc27404af0ea43a6 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..42d5e60ce9d45 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +afd90dc0e164be74b4a3e1a899890557fce98567 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 deleted file mode 100644 index e73026b412972..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f651595784d6cca4cbca6a8ad74c48fceed6cea8 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 
b/plugins/repository-azure/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..2fc787ee65197 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +cc8baf4ff67c1bcc0cde60bc5c2bb9447d92d9e6 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.94.Final.jar.sha1 deleted file mode 100644 index de2c4d00aef09..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b9192c7cda295d75f236a13a0b1f5a008f05d516 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..8e959bdac5079 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +f53c52dbddaa4a02a51430405792d3f30a89b147 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.94.Final.jar.sha1 deleted file mode 100644 index a2db8bece8f6f..0000000000000 --- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -26ba9d30b8f7b095155b9ac63378d6d9386d85c3 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..d410208dada90 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +dcabd63f4aaec2b4cad7588bfdd4cd2c82287e38 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.94.Final.jar.sha1 deleted file mode 100644 index 2fa927b3b77ba..0000000000000 --- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -25bbe90e10685ce63c32bd0db56574cffffa28de \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..5041cf5473505 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +0095023cc667af76578c9be326a6d54e3e1de52c \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 deleted file mode 100644 index 43bc960a347a1..0000000000000 --- a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3fa5f9d04b6b782d869d6e0657d896eeadca5866 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..e911c47d5ab1a --- /dev/null +++ b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ 
+daf8578cade63a01525ee9d70371fa78e6e91094 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1 deleted file mode 100644 index 6766770f61e78..0000000000000 --- a/plugins/repository-hdfs/licenses/netty-all-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2a7df0424eed81818157f22613f36b72487ceb34 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..32ced5451cfb6 --- /dev/null +++ b/plugins/repository-hdfs/licenses/netty-all-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +2145ec747511965e4a57099767654cf9083ce8a7 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1 deleted file mode 100644 index 05b1c2a4d614e..0000000000000 --- a/plugins/repository-s3/licenses/netty-buffer-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eec248b26f16e888688e5bb37b7eeda76b78d2f7 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..7abdb33dc79a2 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-buffer-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +4b80fffbe77485b457bf844289bf1801f61b9e91 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1 deleted file mode 100644 index baa7e25f1ac49..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c70ef20ca338558147887df60f46341bc47f6900 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..8fdb32be1de0b --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +9cfe430f8b14e7ba86969d8e1126aa0aae4d18f0 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.94.Final.jar.sha1 deleted file mode 100644 index 8c018be2565e5..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9e5404764092c1f6305ad5719078f46ab228d587 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..dfb0cf39463e2 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +a4d0d95df5026965c454902ef3d6d84b81f89626 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 deleted file mode 100644 index e73026b412972..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http2-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f651595784d6cca4cbca6a8ad74c48fceed6cea8 \ No newline at end of file diff --git 
a/plugins/repository-s3/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..2fc787ee65197 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http2-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +cc8baf4ff67c1bcc0cde60bc5c2bb9447d92d9e6 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1 deleted file mode 100644 index b787338551ede..0000000000000 --- a/plugins/repository-s3/licenses/netty-common-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad4ecf779ebc794cd351f57792f56ea01387b868 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..85b5f52749671 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-common-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +d10c167623cbc471753f950846df241d1021655c \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1 deleted file mode 100644 index b08e85ba7adf8..0000000000000 --- a/plugins/repository-s3/licenses/netty-handler-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cd9121ce24d6d3f2898946d04b0ef3ec548b00b4 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..fe4f48c68e78b --- /dev/null +++ b/plugins/repository-s3/licenses/netty-handler-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +7840d7523d709e02961b647546f9d9dde1699306 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1 deleted file mode 100644 index 4c9e4dda2b852..0000000000000 --- a/plugins/repository-s3/licenses/netty-resolver-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e96f649e8e9dcb29a1f8e95328b99c9eb6cf76c2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..9e93f013226cd --- /dev/null +++ b/plugins/repository-s3/licenses/netty-resolver-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +0e51db5568a881e0f9b013b35617c597dc32f130 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.94.Final.jar.sha1 deleted file mode 100644 index ed7760b8e15d1..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec783a737f96991a87b1d5794e2f9eb2024d708a \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..707285d3d29c3 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +dbd15ca244be28e1a98ed29b9d755edbfa737e02 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.94.Final.jar.sha1 
b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.94.Final.jar.sha1 deleted file mode 100644 index 72a392ea2917d..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -240e36cd5c2ffaf655913f8857f2d58b26394679 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..58564d9da4b27 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +b0369501645f6e71f89ff7f77b5c5f52510a2e31 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 deleted file mode 100644 index 43bc960a347a1..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3fa5f9d04b6b782d869d6e0657d896eeadca5866 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..e911c47d5ab1a --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +daf8578cade63a01525ee9d70371fa78e6e91094 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1 deleted file mode 100644 index 05b1c2a4d614e..0000000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eec248b26f16e888688e5bb37b7eeda76b78d2f7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..7abdb33dc79a2 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +4b80fffbe77485b457bf844289bf1801f61b9e91 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1 deleted file mode 100644 index baa7e25f1ac49..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c70ef20ca338558147887df60f46341bc47f6900 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..8fdb32be1de0b --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +9cfe430f8b14e7ba86969d8e1126aa0aae4d18f0 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.94.Final.jar.sha1 deleted file mode 100644 index 8c018be2565e5..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9e5404764092c1f6305ad5719078f46ab228d587 \ No newline at end of file diff --git 
a/plugins/transport-nio/licenses/netty-codec-http-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..dfb0cf39463e2 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +a4d0d95df5026965c454902ef3d6d84b81f89626 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1 deleted file mode 100644 index b787338551ede..0000000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad4ecf779ebc794cd351f57792f56ea01387b868 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..85b5f52749671 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +d10c167623cbc471753f950846df241d1021655c \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1 deleted file mode 100644 index b08e85ba7adf8..0000000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cd9121ce24d6d3f2898946d04b0ef3ec548b00b4 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..fe4f48c68e78b --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +7840d7523d709e02961b647546f9d9dde1699306 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1 deleted file mode 100644 index 4c9e4dda2b852..0000000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e96f649e8e9dcb29a1f8e95328b99c9eb6cf76c2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..9e93f013226cd --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +0e51db5568a881e0f9b013b35617c597dc32f130 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.94.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.94.Final.jar.sha1 deleted file mode 100644 index ed7760b8e15d1..0000000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.94.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec783a737f96991a87b1d5794e2f9eb2024d708a \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.96.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.96.Final.jar.sha1 new file mode 100644 index 0000000000000..707285d3d29c3 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.96.Final.jar.sha1 @@ -0,0 +1 @@ +dbd15ca244be28e1a98ed29b9d755edbfa737e02 \ No newline at end of file From cc641eb30ea6cde1708a7d8edac5764ce03d0a20 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 1 Aug 2023 15:12:03 -0500 Subject: [PATCH 
41/75] [BWC] Change BWC version check for MediaType (#9035) This commit changes the bwc version check for serializing MediaTypes over the transport wire after backporting changes to 2.x. Signed-off-by: Nicholas Walter Knize --- .../admin/cluster/storedscripts/PutStoredScriptRequest.java | 4 ++-- .../main/java/org/opensearch/action/index/IndexRequest.java | 4 ++-- .../java/org/opensearch/action/ingest/PutPipelineRequest.java | 4 ++-- .../org/opensearch/action/ingest/SimulatePipelineRequest.java | 4 ++-- .../opensearch/action/search/PutSearchPipelineRequest.java | 4 ++-- .../org/opensearch/action/termvectors/TermVectorsRequest.java | 4 ++-- .../org/opensearch/extensions/rest/ExtensionRestRequest.java | 4 ++-- .../org/opensearch/index/query/MoreLikeThisQueryBuilder.java | 4 ++-- .../java/org/opensearch/ingest/PipelineConfiguration.java | 4 ++-- .../org/opensearch/search/pipeline/PipelineConfiguration.java | 4 ++-- 10 files changed, 20 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java index 761373a001ffe..8b328bc3879dd 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java @@ -68,7 +68,7 @@ public PutStoredScriptRequest(StreamInput in) throws IOException { super(in); id = in.readOptionalString(); content = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType = in.readMediaType(); } else { mediaType = in.readEnum(XContentType.class); @@ -152,7 +152,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(id); out.writeBytesReference(content); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequest.java b/server/src/main/java/org/opensearch/action/index/IndexRequest.java index ac4c8436aab5a..584bee0caaf2e 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/opensearch/action/index/IndexRequest.java @@ -159,7 +159,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio isRetry = in.readBoolean(); autoGeneratedTimestamp = in.readLong(); if (in.readBoolean()) { - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { contentType = in.readMediaType(); } else { contentType = in.readEnum(XContentType.class); @@ -670,7 +670,7 @@ private void writeBody(StreamOutput out) throws IOException { out.writeLong(autoGeneratedTimestamp); if (contentType != null) { out.writeBoolean(true); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { contentType.writeTo(out); } else { out.writeEnum((XContentType) contentType); diff --git a/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java b/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java index 7a88f817c70bf..f764e4b23860a 100644 --- a/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java +++ 
b/server/src/main/java/org/opensearch/action/ingest/PutPipelineRequest.java @@ -70,7 +70,7 @@ public PutPipelineRequest(StreamInput in) throws IOException { super(in); id = in.readString(); source = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType = in.readMediaType(); } else { mediaType = in.readEnum(XContentType.class); @@ -101,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); out.writeBytesReference(source); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java index 1ac441a1afe64..4837cfdd492b4 100644 --- a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java @@ -85,7 +85,7 @@ public SimulatePipelineRequest(BytesReference source, MediaType mediaType) { id = in.readOptionalString(); verbose = in.readBoolean(); source = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType = in.readMediaType(); } else { mediaType = in.readEnum(XContentType.class); @@ -127,7 +127,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(id); out.writeBoolean(verbose); out.writeBytesReference(source); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java index d32aab0c8a561..d0484b3a69a1e 100644 --- a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java +++ b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java @@ -47,7 +47,7 @@ public PutSearchPipelineRequest(StreamInput in) throws IOException { super(in); id = in.readString(); source = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType = in.readMediaType(); } else { mediaType = in.readEnum(XContentType.class); @@ -76,7 +76,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); out.writeBytesReference(source); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java index 57cc4698cefce..f15a039fd9305 100644 --- a/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/opensearch/action/termvectors/TermVectorsRequest.java @@ -186,7 +186,7 @@ public TermVectorsRequest() {} if (in.readBoolean()) { doc = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType = in.readMediaType(); 
} else { mediaType = in.readEnum(XContentType.class); @@ -538,7 +538,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(doc != null); if (doc != null) { out.writeBytesReference(doc); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/extensions/rest/ExtensionRestRequest.java b/server/src/main/java/org/opensearch/extensions/rest/ExtensionRestRequest.java index e6df6e964a31b..8b13061def8d0 100644 --- a/server/src/main/java/org/opensearch/extensions/rest/ExtensionRestRequest.java +++ b/server/src/main/java/org/opensearch/extensions/rest/ExtensionRestRequest.java @@ -104,7 +104,7 @@ public ExtensionRestRequest(StreamInput in) throws IOException { params = in.readMap(StreamInput::readString, StreamInput::readString); headers = in.readMap(StreamInput::readString, StreamInput::readStringList); if (in.readBoolean()) { - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType = in.readMediaType(); } else { mediaType = in.readEnum(XContentType.class); @@ -125,7 +125,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeStringCollection); out.writeBoolean(mediaType != null); if (mediaType != null) { - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java index 7287634ecfacb..13ed1859d3622 100644 --- a/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/MoreLikeThisQueryBuilder.java @@ -235,7 +235,7 @@ public Item(@Nullable String index, XContentBuilder doc) { } if (in.readBoolean()) { doc = (BytesReference) in.readGenericValue(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType = in.readMediaType(); } else { mediaType = in.readEnum(XContentType.class); @@ -260,7 +260,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(doc != null); if (doc != null) { out.writeGenericValue(doc); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/opensearch/ingest/PipelineConfiguration.java index 04892e4653065..3b918592053c1 100644 --- a/server/src/main/java/org/opensearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/opensearch/ingest/PipelineConfiguration.java @@ -141,7 +141,7 @@ public static PipelineConfiguration readFrom(StreamInput in) throws IOException return new PipelineConfiguration( in.readString(), in.readBytesReference(), - in.getVersion().onOrAfter(Version.V_3_0_0) ? in.readMediaType() : in.readEnum(XContentType.class) + in.getVersion().onOrAfter(Version.V_2_10_0) ? 
in.readMediaType() : in.readEnum(XContentType.class) ); } @@ -158,7 +158,7 @@ public String toString() { public void writeTo(StreamOutput out) throws IOException { out.writeString(id); out.writeBytesReference(config); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); diff --git a/server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java b/server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java index b4f6549c83390..33686139039b9 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java +++ b/server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java @@ -120,7 +120,7 @@ public static PipelineConfiguration readFrom(StreamInput in) throws IOException return new PipelineConfiguration( in.readString(), in.readBytesReference(), - in.getVersion().onOrAfter(Version.V_3_0_0) ? in.readMediaType() : in.readEnum(XContentType.class) + in.getVersion().onOrAfter(Version.V_2_10_0) ? in.readMediaType() : in.readEnum(XContentType.class) ); } @@ -137,7 +137,7 @@ public String toString() { public void writeTo(StreamOutput out) throws IOException { out.writeString(id); out.writeBytesReference(config); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { mediaType.writeTo(out); } else { out.writeEnum((XContentType) mediaType); From 85f918e3fe3e78b7e004b277645f799e06d95593 Mon Sep 17 00:00:00 2001 From: Sayali Gaikawad <61760125+gaiksaya@users.noreply.github.com> Date: Tue, 1 Aug 2023 15:27:20 -0700 Subject: [PATCH 42/75] Add workflow to check compatibility (#8486) Signed-off-by: Sayali Gaikawad --- .github/workflows/check-compatibility.yml | 35 +++++++++++++++++++ .../gradle/CheckCompatibilityTask.groovy | 14 ++++++-- 2 files changed, 46 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/check-compatibility.yml diff --git a/.github/workflows/check-compatibility.yml b/.github/workflows/check-compatibility.yml new file mode 100644 index 0000000000000..b208fe38a581f --- /dev/null +++ b/.github/workflows/check-compatibility.yml @@ -0,0 +1,35 @@ +--- +name: Check Compatibility + +on: + pull_request_target + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Run compatibility task + run: ./gradlew checkCompatibility | tee $HOME/gradlew-check.out + + - name: Get results + run: | + echo 'Compatibility status:' > ${{ github.workspace }}/results.txt && echo '```' >> ${{ github.workspace }}/results.txt + grep -e 'Compatible components' -e 'Incompatible components' -e 'Components skipped' -A 2 -B 3 $HOME/gradlew-check.out >> "${{ github.workspace }}/results.txt" + echo '```' >> ${{ github.workspace }}/results.txt + + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.6.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + + - name: Add comment on the PR + uses: peter-evans/create-or-update-comment@v3 + with: + token: ${{ steps.github_app_token.outputs.token }} + issue-number: ${{ github.event.number }} + body-path: "${{ github.workspace }}/results.txt" diff --git a/buildSrc/src/main/groovy/org/opensearch/gradle/CheckCompatibilityTask.groovy b/buildSrc/src/main/groovy/org/opensearch/gradle/CheckCompatibilityTask.groovy index ee6446fec6d57..b95bb3be22f8b 100644 --- 
a/buildSrc/src/main/groovy/org/opensearch/gradle/CheckCompatibilityTask.groovy +++ b/buildSrc/src/main/groovy/org/opensearch/gradle/CheckCompatibilityTask.groovy @@ -40,8 +40,8 @@ class CheckCompatibilityTask extends DefaultTask { @TaskAction void checkCompatibility() { - logger.info("Checking compatibility for: $repositoryUrls for $ref") repositoryUrls.parallelStream().forEach { repositoryUrl -> + logger.lifecycle("Checking compatibility for: $repositoryUrl with ref: $ref") def tempDir = File.createTempDir() try { if (cloneAndCheckout(repositoryUrl, tempDir)) { @@ -81,8 +81,16 @@ class CheckCompatibilityTask extends DefaultTask { protected static List getRepoUrls() { def json = new JsonSlurper().parse(REPO_URL.toURL()) - def labels = json.projects.values() - return labels as List + def repository = json.projects.values() + def repoUrls = replaceSshWithHttps(repository as List) + return repoUrls + } + + protected static replaceSshWithHttps(List repoList) { + repoList.replaceAll { element -> + element.replace("git@github.com:", "https://github.com/") + } + return repoList } protected boolean cloneAndCheckout(repoUrl, directory) { From f9158f3fe5395159fc12bbfb66fe9c23f62f9529 Mon Sep 17 00:00:00 2001 From: Kunal Kotwani Date: Tue, 1 Aug 2023 15:38:32 -0700 Subject: [PATCH 43/75] Update BwC version for file cache safeguards (#9038) Signed-off-by: Kunal Kotwani --- server/src/main/java/org/opensearch/cluster/ClusterInfo.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/opensearch/cluster/ClusterInfo.java b/server/src/main/java/org/opensearch/cluster/ClusterInfo.java index 7b1a2f0f12b69..1513dc4e5acf7 100644 --- a/server/src/main/java/org/opensearch/cluster/ClusterInfo.java +++ b/server/src/main/java/org/opensearch/cluster/ClusterInfo.java @@ -110,7 +110,7 @@ public ClusterInfo(StreamInput in) throws IOException { this.shardSizes = Collections.unmodifiableMap(sizeMap); this.routingToDataPath = Collections.unmodifiableMap(routingMap); this.reservedSpace = Collections.unmodifiableMap(reservedSpaceMap); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { this.nodeFileCacheStats = in.readMap(StreamInput::readString, FileCacheStats::new); } else { this.nodeFileCacheStats = Map.of(); @@ -124,7 +124,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(this.shardSizes, StreamOutput::writeString, (o, v) -> out.writeLong(v == null ? -1 : v)); out.writeMap(this.routingToDataPath, (o, k) -> k.writeTo(o), StreamOutput::writeString); out.writeMap(this.reservedSpace, (o, v) -> v.writeTo(o), (o, v) -> v.writeTo(o)); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { out.writeMap(this.nodeFileCacheStats, StreamOutput::writeString, (o, v) -> v.writeTo(o)); } } From 11c84264ca079e0999008ef0a75ab1e2d10e222b Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 1 Aug 2023 19:57:33 -0500 Subject: [PATCH 44/75] [BWC] Change BWC version check for MediaType in PercolateQueryBuilder (#9053) This changes the version check for MediaType in PercolateQueryBuilder for BWC. 
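PATCH 41, PATCH 43, and this PATCH 44 all apply the same backport pattern: wire serialization that was originally gated on Version.V_3_0_0 is re-gated on Version.V_2_10_0 now that the MediaType change has been backported to 2.x, so 2.10+ nodes exchange the generic MediaType while older nodes keep the legacy XContentType enum. A minimal illustrative sketch of that gate follows; ExampleRequest is a hypothetical class (not part of this patch set), and imports are omitted and assumed to match those already used in the files touched above.

// Illustrative sketch only (hypothetical ExampleRequest); mirrors the read/write gate in the surrounding hunks.
public class ExampleRequest {
    private MediaType mediaType;

    public ExampleRequest(StreamInput in) throws IOException {
        // 2.10+ nodes send the generic MediaType; older nodes still send the XContentType enum.
        if (in.getVersion().onOrAfter(Version.V_2_10_0)) {
            mediaType = in.readMediaType();
        } else {
            mediaType = in.readEnum(XContentType.class);
        }
    }

    public void writeTo(StreamOutput out) throws IOException {
        // The write side must mirror the read side so mixed-version clusters agree on the wire format.
        if (out.getVersion().onOrAfter(Version.V_2_10_0)) {
            mediaType.writeTo(out);
        } else {
            out.writeEnum((XContentType) mediaType);
        }
    }
}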
Signed-off-by: Nicholas Walter Knize --- .../java/org/opensearch/percolator/PercolateQueryBuilder.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index 08d9a4855c473..9f49843c37ea5 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -254,7 +254,7 @@ protected PercolateQueryBuilder(String field, Supplier documentS } documents = in.readList(StreamInput::readBytesReference); if (documents.isEmpty() == false) { - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_10_0)) { documentXContentType = in.readMediaType(); } else { documentXContentType = in.readEnum(XContentType.class); @@ -304,7 +304,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeBytesReference(document); } if (documents.isEmpty() == false) { - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_10_0)) { documentXContentType.writeTo(out); } else { out.writeEnum((XContentType) documentXContentType); From 57eb105a6a156e3a313efd58e267d772ac178eea Mon Sep 17 00:00:00 2001 From: Suraj Singh Date: Tue, 1 Aug 2023 23:17:39 -0700 Subject: [PATCH 45/75] [Segment Replication] Refactor RemoteStoreReplicationSource (#8767) * [Segment Replication] Refactor remote replication source Signed-off-by: Suraj Singh * Unit test updates Signed-off-by: Suraj Singh * Self review Signed-off-by: Suraj Singh * Self review Signed-off-by: Suraj Singh * Segregate shard level tests for node to node and remote store segment replication Signed-off-by: Suraj Singh * Fix failing unit tests Signed-off-by: Suraj Singh * Fix failing UT Signed-off-by: Suraj Singh * Fix failing UT Signed-off-by: Suraj Singh * Address review comments Signed-off-by: Suraj Singh * Fix more unit tests Signed-off-by: Suraj Singh * Improve RemoteStoreReplicationSourceTests, remove unnecessary mocks and use actual failures for failure/exception use cases Signed-off-by: Suraj Singh * Spotless check fix Signed-off-by: Suraj Singh * Address review comments Signed-off-by: Suraj Singh * Ignore files already in store while computing segment file diff with primary Signed-off-by: Suraj Singh * Spotless fix Signed-off-by: Suraj Singh * Fix failing UT Signed-off-by: Suraj Singh * Spotless fix Signed-off-by: Suraj Singh * Move read/writes from IndexInput/Output to RemoteSegmentMetadata Signed-off-by: Suraj Singh * Address review commnt Signed-off-by: Suraj Singh * Update recovery flow to perform commits during recovery Signed-off-by: Suraj Singh * Remove un-necessary char Signed-off-by: Suraj Singh * Address review comments Signed-off-by: Suraj Singh * Update comment nit-pick Signed-off-by: Suraj Singh * Remove deletion logic causing read issues due to deleted segments_N Signed-off-by: Suraj Singh * Spotless fix Signed-off-by: Suraj Singh * Fix unit tests Signed-off-by: Suraj Singh --------- Signed-off-by: Suraj Singh --- .../replication/SegmentReplicationBaseIT.java | 20 - .../remotestore/RemoteStoreStatsIT.java | 8 +- .../RemoteIndexSnapshotStatusApiIT.java | 1 + .../opensearch/index/shard/IndexShard.java | 54 +- .../shard/RemoteStoreRefreshListener.java | 9 +- .../opensearch/index/shard/StoreRecovery.java | 2 +- 
.../store/RemoteSegmentStoreDirectory.java | 13 +- .../org/opensearch/index/store/Store.java | 10 +- .../metadata/RemoteSegmentMetadata.java | 53 +- .../recovery/PeerRecoveryTargetService.java | 2 +- .../replication/GetSegmentFilesResponse.java | 4 + .../RemoteStoreReplicationSource.java | 50 +- .../replication/SegmentReplicationTarget.java | 55 +- .../RemoteStoreRefreshListenerTests.java | 2 +- ...overyWithRemoteTranslogOnPrimaryTests.java | 37 +- .../SegmentReplicationIndexShardTests.java | 896 ++---------------- ...licationWithNodeToNodeIndexShardTests.java | 697 ++++++++++++++ ...tReplicationWithRemoteIndexShardTests.java | 133 ++- .../RemoteSegmentStoreDirectoryTests.java | 47 +- .../RemoteSegmentMetadataHandlerTests.java | 28 +- ...teStorePeerRecoverySourceHandlerTests.java | 29 +- .../RemoteStoreReplicationSourceTests.java | 199 ++-- ...enSearchIndexLevelReplicationTestCase.java | 6 +- .../index/shard/IndexShardTestCase.java | 64 +- 24 files changed, 1291 insertions(+), 1128 deletions(-) create mode 100644 server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithNodeToNodeIndexShardTests.java diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java index 64c6ebbb33482..cfb2e11c8c429 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java @@ -8,7 +8,6 @@ package org.opensearch.indices.replication; -import org.opensearch.action.admin.indices.replication.SegmentReplicationStatsResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; @@ -24,7 +23,6 @@ import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexService; -import org.opensearch.index.SegmentReplicationPerGroupStats; import org.opensearch.index.SegmentReplicationShardStats; import org.opensearch.index.engine.Engine; import org.opensearch.index.shard.IndexShard; @@ -134,24 +132,6 @@ protected void waitForSearchableDocs(long docCount, String... 
nodes) throws Exce waitForSearchableDocs(docCount, Arrays.stream(nodes).collect(Collectors.toList())); } - protected void waitForSegmentReplication(String node) throws Exception { - assertBusy(() -> { - SegmentReplicationStatsResponse segmentReplicationStatsResponse = client(node).admin() - .indices() - .prepareSegmentReplicationStats(INDEX_NAME) - .setDetailed(true) - .execute() - .actionGet(); - final SegmentReplicationPerGroupStats perGroupStats = segmentReplicationStatsResponse.getReplicationStats() - .get(INDEX_NAME) - .get(0); - assertEquals( - perGroupStats.getReplicaStats().stream().findFirst().get().getCurrentReplicationState().getStage(), - SegmentReplicationState.Stage.DONE - ); - }, 1, TimeUnit.MINUTES); - } - protected void verifyStoreContent() throws Exception { assertBusy(() -> { final ClusterState clusterState = getClusterState(); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java index 840e3a07ed255..1c7f14701b3e7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java @@ -272,12 +272,12 @@ public void testDownloadStatsCorrectnessSinglePrimarySingleReplica() throws Exce assertTrue( replicaStats.directoryFileTransferTrackerStats.transferredBytesStarted > 0 && primaryStats.uploadBytesStarted - - zeroStatePrimaryStats.uploadBytesStarted == replicaStats.directoryFileTransferTrackerStats.transferredBytesStarted + - zeroStatePrimaryStats.uploadBytesStarted >= replicaStats.directoryFileTransferTrackerStats.transferredBytesStarted ); assertTrue( replicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded > 0 && primaryStats.uploadBytesSucceeded - - zeroStatePrimaryStats.uploadBytesSucceeded == replicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded + - zeroStatePrimaryStats.uploadBytesSucceeded >= replicaStats.directoryFileTransferTrackerStats.transferredBytesSucceeded ); // Assert zero failures assertEquals(0, primaryStats.uploadBytesFailed); @@ -369,8 +369,8 @@ public void testDownloadStatsCorrectnessSinglePrimaryMultipleReplicaShards() thr assertEquals(0, uploadsFailed); assertEquals(0, uploadBytesFailed); for (int j = 0; j < response.getSuccessfulShards() - 1; j++) { - assertEquals(uploadBytesStarted - zeroStatePrimaryStats.uploadBytesStarted, (long) downloadBytesStarted.get(j)); - assertEquals(uploadBytesSucceeded - zeroStatePrimaryStats.uploadBytesSucceeded, (long) downloadBytesSucceeded.get(j)); + assertTrue(uploadBytesStarted - zeroStatePrimaryStats.uploadBytesStarted > downloadBytesStarted.get(j)); + assertTrue(uploadBytesSucceeded - zeroStatePrimaryStats.uploadBytesSucceeded > downloadBytesSucceeded.get(j)); assertEquals(0, (long) downloadBytesFailed.get(j)); } }, 60, TimeUnit.SECONDS); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java index b6a5188c99335..d17410d8921ed 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/RemoteIndexSnapshotStatusApiIT.java @@ -57,6 +57,7 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(super.nodeSettings(nodeOrdinal)) 
.put(ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING.getKey(), 0) // We have tests that check by-timestamp order .put(FeatureFlags.REMOTE_STORE, "true") + .put(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL, "true") .put(remoteStoreClusterSettings("remote-store-repo-name")) .build(); } diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index bb5088866edb6..2b85193275a13 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -199,9 +199,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.EnumSet; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; @@ -1988,7 +1986,7 @@ private long recoverLocallyUpToGlobalCheckpoint() { final Optional safeCommit; final long globalCheckpoint; try { - final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); + final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(TRANSLOG_UUID_KEY); globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID); safeCommit = store.findSafeIndexCommit(globalCheckpoint); } catch (org.apache.lucene.index.IndexNotFoundException e) { @@ -2088,7 +2086,7 @@ private long recoverLocallyUptoLastCommit() { try { seqNo = Long.parseLong(store.readLastCommittedSegmentsInfo().getUserData().get(MAX_SEQ_NO)); } catch (org.apache.lucene.index.IndexNotFoundException e) { - logger.error("skip local recovery as no index commit found", e); + logger.error("skip local recovery as no index commit found"); return UNASSIGNED_SEQ_NO; } catch (Exception e) { logger.error("skip local recovery as failed to find the safe commit", e); @@ -2242,7 +2240,7 @@ private void loadGlobalCheckpointToReplicationTracker() throws IOException { // we have to set it before we open an engine and recover from the translog because // acquiring a snapshot from the translog causes a sync which causes the global checkpoint to be pulled in, // and an engine can be forced to close in ctor which also causes the global checkpoint to be pulled in. 
- final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); + final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(TRANSLOG_UUID_KEY); final long globalCheckpoint = Translog.readGlobalCheckpoint(translogConfig.getTranslogPath(), translogUUID); replicationTracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "read from translog checkpoint"); } @@ -2326,7 +2324,7 @@ private void innerOpenEngineAndTranslog(LongSupplier globalCheckpointSupplier, b assert currentEngineReference.get() == null : "engine is running"; verifyNotClosed(); if (indexSettings.isRemoteStoreEnabled() && syncFromRemote) { - syncSegmentsFromRemoteSegmentStore(false, true, true); + syncSegmentsFromRemoteSegmentStore(false, true); } if (indexSettings.isRemoteTranslogStoreEnabled() && shardRouting.primary()) { if (syncFromRemote) { @@ -4555,7 +4553,7 @@ public void close() throws IOException { }; IOUtils.close(currentEngineReference.getAndSet(readOnlyEngine)); if (indexSettings.isRemoteStoreEnabled()) { - syncSegmentsFromRemoteSegmentStore(false, true, true); + syncSegmentsFromRemoteSegmentStore(false, true); } if (indexSettings.isRemoteTranslogStoreEnabled() && shardRouting.primary()) { syncRemoteTranslogAndUpdateGlobalCheckpoint(); @@ -4616,13 +4614,11 @@ public void syncTranslogFilesFromRemoteTranslog() throws IOException { * Downloads segments from remote segment store. * @param overrideLocal flag to override local segment files with those in remote store * @param refreshLevelSegmentSync last refresh checkpoint is used if true, commit checkpoint otherwise - * @param shouldCommit if the shard requires committing the changes after sync from remote. * @throws IOException if exception occurs while reading segments from remote store */ - public void syncSegmentsFromRemoteSegmentStore(boolean overrideLocal, boolean refreshLevelSegmentSync, boolean shouldCommit) - throws IOException { + public void syncSegmentsFromRemoteSegmentStore(boolean overrideLocal, boolean refreshLevelSegmentSync) throws IOException { assert indexSettings.isRemoteStoreEnabled(); - logger.info("Downloading segments from remote segment store"); + logger.trace("Downloading segments from remote segment store"); RemoteSegmentStoreDirectory remoteDirectory = getRemoteDirectory(); // We need to call RemoteSegmentStoreDirectory.init() in order to get latest metadata of the files that // are uploaded to the remote segment store. @@ -4647,7 +4643,6 @@ public void syncSegmentsFromRemoteSegmentStore(boolean overrideLocal, boolean re } else { storeDirectory = store.directory(); } - Set localSegmentFiles = Sets.newHashSet(storeDirectory.listAll()); copySegmentFiles(storeDirectory, remoteDirectory, null, uploadedSegments, overrideLocal); if (refreshLevelSegmentSync && remoteSegmentMetadata != null) { @@ -4661,37 +4656,8 @@ public void syncSegmentsFromRemoteSegmentStore(boolean overrideLocal, boolean re indexInput, remoteSegmentMetadata.getGeneration() ); - // Replicas never need a local commit - if (shouldCommit) { - if (this.shardRouting.primary()) { - long processedLocalCheckpoint = Long.parseLong(infosSnapshot.getUserData().get(LOCAL_CHECKPOINT_KEY)); - // Following code block makes sure to use SegmentInfosSnapshot in the remote store if generation differs - // with local filesystem. If local filesystem already has segments_N+2 and infosSnapshot has generation N, - // after commit, there would be 2 files that would be created segments_N+1 and segments_N+2. 
With the - // policy of preserving only the latest commit, we will delete segments_N+1 which in fact is the part of the - // latest commit. - Optional localMaxSegmentInfos = localSegmentFiles.stream() - .filter(file -> file.startsWith(IndexFileNames.SEGMENTS)) - .max(Comparator.comparingLong(SegmentInfos::generationFromSegmentsFileName)); - if (localMaxSegmentInfos.isPresent() - && infosSnapshot.getGeneration() < SegmentInfos.generationFromSegmentsFileName(localMaxSegmentInfos.get()) - - 1) { - // If remote translog is not enabled, local translog will be created with different UUID. - // This fails in Store.trimUnsafeCommits() as translog UUID of checkpoint and SegmentInfos needs - // to be same. Following code block make sure to have the same UUID. - if (indexSettings.isRemoteTranslogStoreEnabled() == false) { - SegmentInfos localSegmentInfos = store.readLastCommittedSegmentsInfo(); - Map userData = new HashMap<>(infosSnapshot.getUserData()); - userData.put(TRANSLOG_UUID_KEY, localSegmentInfos.userData.get(TRANSLOG_UUID_KEY)); - infosSnapshot.setUserData(userData, false); - } - storeDirectory.deleteFile(localMaxSegmentInfos.get()); - } - store.commitSegmentInfos(infosSnapshot, processedLocalCheckpoint, processedLocalCheckpoint); - } - } else { - finalizeReplication(infosSnapshot); - } + long processedLocalCheckpoint = Long.parseLong(infosSnapshot.getUserData().get(LOCAL_CHECKPOINT_KEY)); + store.commitSegmentInfos(infosSnapshot, processedLocalCheckpoint, processedLocalCheckpoint); } } } catch (IOException e) { @@ -4716,7 +4682,7 @@ public void syncSegmentsFromGivenRemoteSegmentStore( long primaryTerm, long commitGeneration ) throws IOException { - logger.info("Downloading segments from given remote segment store"); + logger.trace("Downloading segments from given remote segment store"); RemoteSegmentStoreDirectory remoteDirectory = null; if (remoteStore != null) { remoteDirectory = getRemoteDirectory(); diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java index 4a70ff04770d3..8dd0c8b9d4405 100644 --- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java +++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java @@ -220,7 +220,7 @@ private synchronized boolean syncSegments() { public void onResponse(Void unused) { try { // Start metadata file upload - uploadMetadata(localSegmentsPostRefresh, segmentInfos); + uploadMetadata(localSegmentsPostRefresh, segmentInfos, checkpoint); clearStaleFilesFromLocalSegmentChecksumMap(localSegmentsPostRefresh); onSuccessfulSegmentsSync( refreshTimeMs, @@ -327,7 +327,8 @@ private boolean isRefreshAfterCommit() throws IOException { && !remoteDirectory.containsFile(lastCommittedLocalSegmentFileName, getChecksumOfLocalFile(lastCommittedLocalSegmentFileName))); } - void uploadMetadata(Collection localSegmentsPostRefresh, SegmentInfos segmentInfos) throws IOException { + void uploadMetadata(Collection localSegmentsPostRefresh, SegmentInfos segmentInfos, ReplicationCheckpoint replicationCheckpoint) + throws IOException { final long maxSeqNo = ((InternalEngine) indexShard.getEngine()).currentOngoingRefreshCheckpoint(); SegmentInfos segmentInfosSnapshot = segmentInfos.clone(); Map userData = segmentInfosSnapshot.getUserData(); @@ -344,8 +345,8 @@ void uploadMetadata(Collection localSegmentsPostRefresh, SegmentInfos se localSegmentsPostRefresh, segmentInfosSnapshot, storeDirectory, - 
indexShard.getOperationPrimaryTerm(), - translogFileGeneration + translogFileGeneration, + replicationCheckpoint ); } } diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 5897fa7d513d7..2c8a186a6ed53 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -530,7 +530,7 @@ private void recoverFromRemoteStore(IndexShard indexShard) throws IndexShardReco remoteStore.incRef(); try { // Download segments from remote segment store - indexShard.syncSegmentsFromRemoteSegmentStore(true, true, true); + indexShard.syncSegmentsFromRemoteSegmentStore(true, true); if (store.directory().listAll().length == 0) { store.createEmpty(indexShard.indexSettings().getIndexVersionCreated().luceneVersion); diff --git a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java index 8ee267cb67e68..8dfdb3e2c8e06 100644 --- a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java +++ b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java @@ -44,6 +44,7 @@ import org.opensearch.index.store.lockmanager.RemoteStoreLockManager; import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadata; import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadataHandler; +import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; import org.opensearch.threadpool.ThreadPool; import java.io.FileNotFoundException; @@ -603,19 +604,20 @@ public boolean containsFile(String localFilename, String checksum) { * @param segmentFiles segment files that are part of the shard at the time of the latest refresh * @param segmentInfosSnapshot SegmentInfos bytes to store as part of metadata file * @param storeDirectory instance of local directory to temporarily create metadata file before upload - * @param primaryTerm primary term to be used in the name of metadata file + * @param translogGeneration translog generation + * @param replicationCheckpoint ReplicationCheckpoint of primary shard * @throws IOException in case of I/O error while uploading the metadata file */ public void uploadMetadata( Collection segmentFiles, SegmentInfos segmentInfosSnapshot, Directory storeDirectory, - long primaryTerm, - long translogGeneration + long translogGeneration, + ReplicationCheckpoint replicationCheckpoint ) throws IOException { synchronized (this) { String metadataFilename = MetadataFilenameUtils.getMetadataFilename( - primaryTerm, + replicationCheckpoint.getPrimaryTerm(), segmentInfosSnapshot.getGeneration(), translogGeneration, metadataUploadCounter.incrementAndGet(), @@ -646,8 +648,7 @@ public void uploadMetadata( new RemoteSegmentMetadata( RemoteSegmentMetadata.fromMapOfStrings(uploadedSegments), segmentInfoSnapshotByteArray, - primaryTerm, - segmentInfosSnapshot.getGeneration() + replicationCheckpoint ) ); } diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index a67b87f58110c..921deae41946a 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ b/server/src/main/java/org/opensearch/index/store/Store.java @@ -845,22 +845,24 @@ private void cleanupFiles(Collection filesToConsiderForCleanup, String r * @param tmpToFileName Map of temporary replication file to 
actual file name * @param infosBytes bytes[] of SegmentInfos supposed to be sent over by primary excluding segment_N file * @param segmentsGen segment generation number - * @param consumer consumer for generated SegmentInfos + * @param finalizeConsumer consumer for action on passed in SegmentInfos + * @param renameConsumer consumer for action on temporary copied over files * @throws IOException Exception while reading store and building segment infos */ public void buildInfosFromBytes( Map tmpToFileName, byte[] infosBytes, long segmentsGen, - CheckedConsumer consumer + CheckedConsumer finalizeConsumer, + CheckedConsumer, IOException> renameConsumer ) throws IOException { metadataLock.writeLock().lock(); try { final List values = new ArrayList<>(tmpToFileName.values()); incRefFileDeleter(values); try { - renameTempFilesSafe(tmpToFileName); - consumer.accept(buildSegmentInfos(infosBytes, segmentsGen)); + renameConsumer.accept(tmpToFileName); + finalizeConsumer.accept(buildSegmentInfos(infosBytes, segmentsGen)); } finally { decRefFileDeleter(values); } diff --git a/server/src/main/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadata.java b/server/src/main/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadata.java index 9a479346ff711..15703a2c02b13 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadata.java +++ b/server/src/main/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadata.java @@ -14,7 +14,10 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; +import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.store.RemoteSegmentStoreDirectory; +import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; /** * Metadata object for Remote Segment @@ -38,19 +41,16 @@ public class RemoteSegmentMetadata { private final byte[] segmentInfosBytes; - private final long primaryTerm; - private final long generation; + private final ReplicationCheckpoint replicationCheckpoint; public RemoteSegmentMetadata( Map metadata, byte[] segmentInfosBytes, - long primaryTerm, - long generation + ReplicationCheckpoint replicationCheckpoint ) { this.metadata = metadata; this.segmentInfosBytes = segmentInfosBytes; - this.generation = generation; - this.primaryTerm = primaryTerm; + this.replicationCheckpoint = replicationCheckpoint; } /** @@ -66,11 +66,15 @@ public byte[] getSegmentInfosBytes() { } public long getGeneration() { - return generation; + return replicationCheckpoint.getSegmentsGen(); } public long getPrimaryTerm() { - return primaryTerm; + return replicationCheckpoint.getPrimaryTerm(); + } + + public ReplicationCheckpoint getReplicationCheckpoint() { + return replicationCheckpoint; } /** @@ -99,19 +103,42 @@ public static Map f public void write(IndexOutput out) throws IOException { out.writeMapOfStrings(toMapOfStrings()); - out.writeLong(generation); - out.writeLong(primaryTerm); + writeCheckpointToIndexOutput(replicationCheckpoint, out); out.writeLong(segmentInfosBytes.length); out.writeBytes(segmentInfosBytes, segmentInfosBytes.length); } public static RemoteSegmentMetadata read(IndexInput indexInput) throws IOException { Map metadata = indexInput.readMapOfStrings(); - long generation = indexInput.readLong(); - long primaryTerm = indexInput.readLong(); + ReplicationCheckpoint replicationCheckpoint = readCheckpointFromIndexInput(indexInput); int byteArraySize = (int) indexInput.readLong(); byte[] 
segmentInfosBytes = new byte[byteArraySize]; indexInput.readBytes(segmentInfosBytes, 0, byteArraySize); - return new RemoteSegmentMetadata(RemoteSegmentMetadata.fromMapOfStrings(metadata), segmentInfosBytes, primaryTerm, generation); + return new RemoteSegmentMetadata(RemoteSegmentMetadata.fromMapOfStrings(metadata), segmentInfosBytes, replicationCheckpoint); + } + + public static void writeCheckpointToIndexOutput(ReplicationCheckpoint replicationCheckpoint, IndexOutput out) throws IOException { + ShardId shardId = replicationCheckpoint.getShardId(); + // Write ShardId + out.writeString(shardId.getIndex().getName()); + out.writeString(shardId.getIndex().getUUID()); + out.writeVInt(shardId.getId()); + // Write remaining checkpoint fields + out.writeLong(replicationCheckpoint.getPrimaryTerm()); + out.writeLong(replicationCheckpoint.getSegmentsGen()); + out.writeLong(replicationCheckpoint.getSegmentInfosVersion()); + out.writeLong(replicationCheckpoint.getLength()); + out.writeString(replicationCheckpoint.getCodec()); + } + + private static ReplicationCheckpoint readCheckpointFromIndexInput(IndexInput in) throws IOException { + return new ReplicationCheckpoint( + new ShardId(new Index(in.readString(), in.readString()), in.readVInt()), + in.readLong(), + in.readLong(), + in.readLong(), + in.readLong(), + in.readString() + ); } } diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index 0ba57a9ee7f65..386b2e0e8192d 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -245,7 +245,7 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi indexShard.prepareForIndexRecovery(); final boolean hasRemoteSegmentStore = indexShard.indexSettings().isRemoteStoreEnabled(); if (hasRemoteSegmentStore) { - indexShard.syncSegmentsFromRemoteSegmentStore(false, false, true); + indexShard.syncSegmentsFromRemoteSegmentStore(false, false); } final boolean hasRemoteTranslog = recoveryTarget.state().getPrimary() == false && indexShard.isRemoteTranslogEnabled(); final boolean hasNoTranslog = indexShard.indexSettings().isRemoteSnapshot(); diff --git a/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java b/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java index 89d50a17464a6..33a84833f2418 100644 --- a/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java +++ b/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java @@ -33,6 +33,10 @@ public GetSegmentFilesResponse(StreamInput out) throws IOException { out.readList(StoreFileMetadata::new); } + public List getFiles() { + return files; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(files); diff --git a/server/src/main/java/org/opensearch/indices/replication/RemoteStoreReplicationSource.java b/server/src/main/java/org/opensearch/indices/replication/RemoteStoreReplicationSource.java index c5be7635782af..7f444d0031533 100644 --- a/server/src/main/java/org/opensearch/indices/replication/RemoteStoreReplicationSource.java +++ b/server/src/main/java/org/opensearch/indices/replication/RemoteStoreReplicationSource.java @@ -10,7 +10,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; 
+import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; +import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Version; import org.opensearch.action.ActionListener; import org.opensearch.index.shard.IndexShard; @@ -21,6 +23,8 @@ import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadata; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -33,12 +37,16 @@ */ public class RemoteStoreReplicationSource implements SegmentReplicationSource { - private static final Logger logger = LogManager.getLogger(PrimaryShardReplicationSource.class); + private static final Logger logger = LogManager.getLogger(RemoteStoreReplicationSource.class); private final IndexShard indexShard; + private final RemoteSegmentStoreDirectory remoteDirectory; public RemoteStoreReplicationSource(IndexShard indexShard) { this.indexShard = indexShard; + FilterDirectory remoteStoreDirectory = (FilterDirectory) indexShard.remoteStore().directory(); + FilterDirectory byteSizeCachingStoreDirectory = (FilterDirectory) remoteStoreDirectory.getDelegate(); + this.remoteDirectory = (RemoteSegmentStoreDirectory) byteSizeCachingStoreDirectory.getDelegate(); } @Override @@ -47,15 +55,11 @@ public void getCheckpointMetadata( ReplicationCheckpoint checkpoint, ActionListener listener ) { - FilterDirectory remoteStoreDirectory = (FilterDirectory) indexShard.remoteStore().directory(); - FilterDirectory byteSizeCachingStoreDirectory = (FilterDirectory) remoteStoreDirectory.getDelegate(); - RemoteSegmentStoreDirectory remoteDirectory = (RemoteSegmentStoreDirectory) byteSizeCachingStoreDirectory.getDelegate(); - Map metadataMap; // TODO: Need to figure out a way to pass this information for segment metadata via remote store. final Version version = indexShard.getSegmentInfosSnapshot().get().getCommitLuceneVersion(); try { - RemoteSegmentMetadata mdFile = remoteDirectory.readLatestMetadataFile(); + RemoteSegmentMetadata mdFile = remoteDirectory.init(); // During initial recovery flow, the remote store might not have metadata as primary hasn't uploaded anything yet. if (mdFile == null && indexShard.state().equals(IndexShardState.STARTED) == false) { listener.onResponse(new CheckpointInfoResponse(checkpoint, Collections.emptyMap(), null)); @@ -77,8 +81,7 @@ public void getCheckpointMetadata( ) ) ); - // TODO: GET current checkpoint from remote store. 
- listener.onResponse(new CheckpointInfoResponse(checkpoint, metadataMap, null)); + listener.onResponse(new CheckpointInfoResponse(mdFile.getReplicationCheckpoint(), metadataMap, mdFile.getSegmentInfosBytes())); } catch (Exception e) { listener.onFailure(e); } @@ -93,8 +96,33 @@ public void getSegmentFiles( ActionListener listener ) { try { - indexShard.syncSegmentsFromRemoteSegmentStore(false, true, false); - listener.onResponse(new GetSegmentFilesResponse(Collections.emptyList())); + if (filesToFetch.isEmpty()) { + listener.onResponse(new GetSegmentFilesResponse(Collections.emptyList())); + return; + } + logger.trace("Downloading segments files from remote store {}", filesToFetch); + + RemoteSegmentMetadata remoteSegmentMetadata = remoteDirectory.readLatestMetadataFile(); + List downloadedSegments = new ArrayList<>(); + Collection directoryFiles = List.of(indexShard.store().directory().listAll()); + if (remoteSegmentMetadata != null) { + try { + indexShard.store().incRef(); + indexShard.remoteStore().incRef(); + final Directory storeDirectory = indexShard.store().directory(); + for (StoreFileMetadata fileMetadata : filesToFetch) { + String file = fileMetadata.name(); + assert directoryFiles.contains(file) == false : "Local store already contains the file " + file; + storeDirectory.copyFrom(remoteDirectory, file, file, IOContext.DEFAULT); + downloadedSegments.add(fileMetadata); + } + logger.trace("Downloaded segments from remote store {}", downloadedSegments); + } finally { + indexShard.store().decRef(); + indexShard.remoteStore().decRef(); + } + } + listener.onResponse(new GetSegmentFilesResponse(downloadedSegments)); } catch (Exception e) { listener.onFailure(e); } @@ -102,6 +130,6 @@ public void getSegmentFiles( @Override public String getDescription() { - return "remote store"; + return "RemoteStoreReplicationSource"; } } diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java index 2e0f5a8c0ad1f..c22701dfc94ce 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java @@ -13,10 +13,6 @@ import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.store.BufferedChecksumIndexInput; -import org.apache.lucene.store.ByteBuffersDataInput; -import org.apache.lucene.store.ByteBuffersIndexInput; -import org.apache.lucene.store.ChecksumIndexInput; import org.opensearch.OpenSearchCorruptionException; import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; @@ -36,10 +32,10 @@ import org.opensearch.indices.replication.common.ReplicationTarget; import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; /** * Represents the target of a replication event. 
@@ -55,10 +51,6 @@ public class SegmentReplicationTarget extends ReplicationTarget { public final static String REPLICATION_PREFIX = "replication."; - public ReplicationCheckpoint getCheckpoint() { - return this.checkpoint; - } - public SegmentReplicationTarget(IndexShard indexShard, SegmentReplicationSource source, ReplicationListener listener) { super("replication_target", indexShard, new ReplicationLuceneIndex(), listener); this.checkpoint = indexShard.getLatestReplicationCheckpoint(); @@ -117,6 +109,10 @@ public boolean reset(CancellableThreads newTargetCancellableThreads) throws IOEx return false; } + public ReplicationCheckpoint getCheckpoint() { + return this.checkpoint; + } + @Override public void writeFileChunk( StoreFileMetadata metadata, @@ -162,7 +158,7 @@ public void startReplication(ActionListener listener) { }, listener::onFailure); getFilesListener.whenComplete(response -> { - finalizeReplication(checkpointInfoListener.result()); + finalizeReplication(checkpointInfoListener.result(), getFilesListener.result()); listener.onResponse(null); }, listener::onFailure); } @@ -193,23 +189,34 @@ private List getFiles(CheckpointInfoResponse checkpointInfo) return diff.missing; } - private void finalizeReplication(CheckpointInfoResponse checkpointInfoResponse) throws OpenSearchCorruptionException { - // TODO: Refactor the logic so that finalize doesn't have to be invoked for remote store as source - if (source instanceof RemoteStoreReplicationSource) { - state.setStage(SegmentReplicationState.Stage.FINALIZE_REPLICATION); - return; - } + private void finalizeReplication(CheckpointInfoResponse checkpointInfoResponse, GetSegmentFilesResponse getSegmentFilesResponse) + throws OpenSearchCorruptionException { cancellableThreads.checkForCancel(); state.setStage(SegmentReplicationState.Stage.FINALIZE_REPLICATION); + // Handle empty SegmentInfos bytes for recovering replicas + if (checkpointInfoResponse.getInfosBytes() == null) { + return; + } Store store = null; try { store = store(); store.incRef(); + Map tempFileNames; + if (this.indexShard.indexSettings().isRemoteStoreEnabled() == true) { + tempFileNames = getSegmentFilesResponse.getFiles() + .stream() + .collect(Collectors.toMap(StoreFileMetadata::name, StoreFileMetadata::name)); + } else { + tempFileNames = multiFileWriter.getTempFileNames(); + } store.buildInfosFromBytes( - multiFileWriter.getTempFileNames(), + tempFileNames, checkpointInfoResponse.getInfosBytes(), checkpointInfoResponse.getCheckpoint().getSegmentsGen(), - indexShard::finalizeReplication + indexShard::finalizeReplication, + this.indexShard.indexSettings().isRemoteStoreEnabled() == true + ? (files) -> {} + : (files) -> indexShard.store().renameTempFilesSafe(files) ); } catch (CorruptIndexException | IndexFormatTooNewException | IndexFormatTooOldException ex) { // this is a fatal exception at this stage. @@ -247,16 +254,6 @@ private void finalizeReplication(CheckpointInfoResponse checkpointInfoResponse) } } - /** - * This method formats our byte[] containing the primary's SegmentInfos into lucene's {@link ChecksumIndexInput} that can be - * passed to SegmentInfos.readCommit - */ - private ChecksumIndexInput toIndexInput(byte[] input) { - return new BufferedChecksumIndexInput( - new ByteBuffersIndexInput(new ByteBuffersDataInput(Arrays.asList(ByteBuffer.wrap(input))), "SegmentInfos") - ); - } - /** * Trigger a cancellation, this method will not close the target a subsequent call to #fail is required from target service. 
*/ diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java index f13f89c6e067c..66938eec10513 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java @@ -413,7 +413,7 @@ private Tuple m when(remoteStore.directory()).thenReturn(remoteStoreFilterDirectory); // Mock indexShard.getOperationPrimaryTerm() - when(shard.getOperationPrimaryTerm()).thenReturn(indexShard.getOperationPrimaryTerm()); + when(shard.getLatestReplicationCheckpoint()).thenReturn(indexShard.getLatestReplicationCheckpoint()); // Mock indexShard.routingEntry().primary() when(shard.routingEntry()).thenReturn(indexShard.routingEntry()); diff --git a/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java b/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java index 690c7955ff338..20b3dfc0f93a6 100644 --- a/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java +++ b/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java @@ -9,22 +9,27 @@ package org.opensearch.index.shard; import org.junit.Assert; +import org.junit.Before; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.routing.RecoverySource; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexSettings; import org.opensearch.index.engine.DocIdSeqNoAndSource; import org.opensearch.index.engine.NRTReplicationEngine; import org.opensearch.index.engine.NRTReplicationEngineFactory; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.replication.OpenSearchIndexLevelReplicationTestCase; import org.opensearch.index.seqno.SequenceNumbers; +import org.opensearch.index.store.Store; import org.opensearch.index.translog.WriteOnlyTranslogManager; import org.opensearch.indices.recovery.RecoveryTarget; import org.opensearch.indices.replication.common.ReplicationType; import java.io.IOException; +import java.nio.file.Path; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; @@ -39,15 +44,24 @@ public class ReplicaRecoveryWithRemoteTranslogOnPrimaryTests extends OpenSearchI .put(IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING.getKey(), "100ms") .build(); - public void testStartSequenceForReplicaRecovery() throws Exception { - try (ReplicationGroup shards = createGroup(0, settings, new NRTReplicationEngineFactory())) { + @Before + public void setup() { + // Todo: Remove feature flag once remote store integration with segrep goes GA + FeatureFlags.initializeFeatureFlags( + Settings.builder().put(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL_SETTING.getKey(), "true").build() + ); + } + public void testStartSequenceForReplicaRecovery() throws Exception { + final Path remoteDir = createTempDir(); + final String indexMapping = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": {} }"; + try (ReplicationGroup shards = createGroup(0, settings, indexMapping, new NRTReplicationEngineFactory(), remoteDir)) { shards.startPrimary(); final IndexShard primary = shards.getPrimary(); int numDocs = 
shards.indexDocs(randomIntBetween(10, 100)); shards.flush(); - final IndexShard replica = shards.addReplica(); + final IndexShard replica = shards.addReplica(remoteDir); shards.startAll(); allowShardFailures(); @@ -63,6 +77,14 @@ public void testStartSequenceForReplicaRecovery() throws Exception { int moreDocs = shards.indexDocs(randomIntBetween(20, 100)); shards.flush(); + final ShardRouting replicaRouting2 = newShardRouting( + replicaRouting.shardId(), + replicaRouting.currentNodeId(), + false, + ShardRoutingState.INITIALIZING, + RecoverySource.PeerRecoverySource.INSTANCE + ); + Store remoteStore = createRemoteStore(remoteDir, replicaRouting2, newIndexMetadata); IndexShard newReplicaShard = newShard( newShardRouting( replicaRouting.shardId(), @@ -80,7 +102,7 @@ public void testStartSequenceForReplicaRecovery() throws Exception { replica.getGlobalCheckpointSyncer(), replica.getRetentionLeaseSyncer(), EMPTY_EVENT_LISTENER, - null + remoteStore ); shards.addReplica(newReplicaShard); AtomicBoolean assertDone = new AtomicBoolean(false); @@ -103,7 +125,6 @@ public IndexShard indexShard() { return idxShard; } }); - shards.flush(); replicateSegments(primary, shards.getReplicas()); shards.assertAllEqual(numDocs + moreDocs); @@ -111,7 +132,9 @@ public IndexShard indexShard() { } public void testNoTranslogHistoryTransferred() throws Exception { - try (ReplicationGroup shards = createGroup(0, settings, new NRTReplicationEngineFactory())) { + final Path remoteDir = createTempDir(); + final String indexMapping = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": {} }"; + try (ReplicationGroup shards = createGroup(0, settings, indexMapping, new NRTReplicationEngineFactory(), remoteDir)) { // Step1 - Start primary, index docs, flush, index more docs, check translog in primary as expected shards.startPrimary(); @@ -123,7 +146,7 @@ public void testNoTranslogHistoryTransferred() throws Exception { assertEquals(numDocs + moreDocs, getTranslog(primary).totalOperations()); // Step 2 - Start replica, recovery happens, check docs recovered till last flush - final IndexShard replica = shards.addReplica(); + final IndexShard replica = shards.addReplica(remoteDir); shards.startAll(); assertEquals(docIdAndSeqNosAfterFlush, getDocIdAndSeqNos(replica)); assertDocCount(replica, numDocs); diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java index 9107606326150..12b7341349442 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java @@ -16,7 +16,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.admin.indices.flush.FlushRequest; import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.opensearch.action.delete.DeleteRequest; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.metadata.IndexMetadata; @@ -31,9 +30,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.lease.Releasable; import org.opensearch.index.IndexSettings; -import org.opensearch.index.engine.DocIdSeqNoAndSource; import org.opensearch.index.engine.Engine; -import org.opensearch.index.engine.InternalEngine; import org.opensearch.index.engine.InternalEngineFactory; import org.opensearch.index.engine.NRTReplicationEngine; 
import org.opensearch.index.engine.NRTReplicationEngineFactory; @@ -44,7 +41,6 @@ import org.opensearch.index.store.StoreFileMetadata; import org.opensearch.index.translog.SnapshotMatchers; import org.opensearch.index.translog.Translog; -import org.opensearch.indices.IndicesService; import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.indices.recovery.RecoveryTarget; import org.opensearch.indices.replication.CheckpointInfoResponse; @@ -67,22 +63,15 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import static java.util.Arrays.asList; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.any; @@ -100,6 +89,41 @@ public class SegmentReplicationIndexShardTests extends OpenSearchIndexLevelRepli .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .build(); + protected ReplicationGroup getReplicationGroup(int numberOfReplicas) throws IOException { + return createGroup(numberOfReplicas, getIndexSettings(), indexMapping, new NRTReplicationEngineFactory()); + } + + protected ReplicationGroup getReplicationGroup(int numberOfReplicas, String indexMapping) throws IOException { + return createGroup(numberOfReplicas, getIndexSettings(), indexMapping, new NRTReplicationEngineFactory()); + } + + protected Settings getIndexSettings() { + return settings; + } + + /** + * Validates happy path of segment replication where primary index docs which are replicated to replica shards. Assertions + * made on doc count on both primary and replica. + * @throws Exception + */ + public void testReplication() throws Exception { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), indexMapping, new NRTReplicationEngineFactory());) { + shards.startAll(); + final IndexShard primaryShard = shards.getPrimary(); + final IndexShard replicaShard = shards.getReplicas().get(0); + + // index and replicate segments to replica. 
+ int numDocs = randomIntBetween(10, 20); + shards.indexDocs(numDocs); + primaryShard.refresh("test"); + flushShard(primaryShard); + replicateSegments(primaryShard, List.of(replicaShard)); + + // Assertions + shards.assertAllEqual(numDocs); + } + } + /** * Test that latestReplicationCheckpoint returns null only for docrep enabled indices */ @@ -114,7 +138,7 @@ public void testReplicationCheckpointNullForDocRep() throws IOException { * Test that latestReplicationCheckpoint returns ReplicationCheckpoint for segrep enabled indices */ public void testReplicationCheckpointNotNullForSegRep() throws IOException { - final IndexShard indexShard = newStartedShard(randomBoolean(), settings, new NRTReplicationEngineFactory()); + final IndexShard indexShard = newStartedShard(randomBoolean(), getIndexSettings(), new NRTReplicationEngineFactory()); final ReplicationCheckpoint replicationCheckpoint = indexShard.getLatestReplicationCheckpoint(); assertNotNull(replicationCheckpoint); closeShards(indexShard); @@ -127,7 +151,7 @@ public void testNRTReplicasDoNotAcceptRefreshListeners() throws IOException { } public void testSegmentInfosAndReplicationCheckpointTuple() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), indexMapping, new NRTReplicationEngineFactory())) { shards.startAll(); final IndexShard primary = shards.getPrimary(); final IndexShard replica = shards.getReplicas().get(0); @@ -149,7 +173,7 @@ public void testSegmentInfosAndReplicationCheckpointTuple() throws Exception { assertEquals(1, primary.getLatestReplicationCheckpoint().compareTo(replica.getLatestReplicationCheckpoint())); // index and copy segments to replica. - int numDocs = randomIntBetween(10, 100); + int numDocs = randomIntBetween(10, 20); shards.indexDocs(numDocs); primary.refresh("test"); replicateSegments(primary, List.of(replica)); @@ -172,6 +196,51 @@ public void testSegmentInfosAndReplicationCheckpointTuple() throws Exception { } } + public void testPrimaryRelocationWithSegRepFailure() throws Exception { + final IndexShard primarySource = newStartedShard(true, getIndexSettings()); + int totalOps = randomInt(10); + for (int i = 0; i < totalOps; i++) { + indexDoc(primarySource, "_doc", Integer.toString(i)); + } + IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1)); + final IndexShard primaryTarget = newShard( + primarySource.routingEntry().getTargetRelocatingShard(), + getIndexSettings(), + new NRTReplicationEngineFactory() + ); + updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetadata()); + + Function, List> replicatePrimaryFunction = (shardList) -> { + try { + throw new IOException("Expected failure"); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; + Exception e = expectThrows( + Exception.class, + () -> recoverReplica( + primaryTarget, + primarySource, + (primary, sourceNode) -> new RecoveryTarget(primary, sourceNode, new ReplicationListener() { + @Override + public void onDone(ReplicationState state) { + throw new AssertionError("recovery must fail"); + } + + @Override + public void onFailure(ReplicationState state, ReplicationFailedException e, boolean sendShardFailure) { + assertEquals(ExceptionsHelper.unwrap(e, IOException.class).getMessage(), "Expected failure"); + } + }), + true, + true, + replicatePrimaryFunction + ) + ); + closeShards(primarySource, primaryTarget); + } + 
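[Editor's note] A side note on the replicatePrimaryFunction added in testPrimaryRelocationWithSegRepFailure above: java.util.function.Function cannot declare checked exceptions, so the test throws an IOException and immediately rethrows it wrapped in a RuntimeException to simulate a failed replication step. The stripped-down sketch below (all names are illustrative, not part of the OpenSearch test suite) shows that wrapping pattern on its own and how a caller observes the wrapped cause.

// Minimal sketch of surfacing a checked exception from a java.util.function.Function.
// Names are illustrative only.
import java.io.IOException;
import java.util.List;
import java.util.function.Function;

public class FailingFunctionSketch {
    public static void main(String[] args) {
        Function<List<String>, List<String>> alwaysFails = (input) -> {
            try {
                // Simulate the replication step failing with a checked exception.
                throw new IOException("Expected failure");
            } catch (IOException e) {
                // Function#apply cannot throw IOException, so wrap it in an unchecked exception.
                throw new RuntimeException(e);
            }
        };
        try {
            alwaysFails.apply(List.of("segment_1"));
        } catch (RuntimeException e) {
            // The original checked exception is recovered via getCause(), which is what the
            // test's onFailure handler unwraps and asserts on.
            System.out.println("Caught wrapped failure: " + e.getCause().getMessage());
        }
    }
}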
private void assertReplicationCheckpoint(IndexShard shard, SegmentInfos segmentInfos, ReplicationCheckpoint checkpoint) throws IOException { assertNotNull(segmentInfos); @@ -180,7 +249,7 @@ private void assertReplicationCheckpoint(IndexShard shard, SegmentInfos segmentI } public void testIsSegmentReplicationAllowed_WrongEngineType() throws IOException { - final IndexShard indexShard = newShard(false, settings, new InternalEngineFactory()); + final IndexShard indexShard = newShard(false, getIndexSettings(), new InternalEngineFactory()); assertFalse(indexShard.isSegmentReplicationAllowed()); closeShards(indexShard); } @@ -193,13 +262,13 @@ public void testIsSegmentReplicationAllowed_WrongEngineType() throws IOException */ public void testSegmentReplication_With_ReaderClosedConcurrently() throws Exception { String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; - try (ReplicationGroup shards = createGroup(1, settings, mappings, new NRTReplicationEngineFactory())) { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), mappings, new NRTReplicationEngineFactory())) { shards.startAll(); IndexShard primaryShard = shards.getPrimary(); final IndexShard replicaShard = shards.getReplicas().get(0); // Step 1. Ingest numDocs documents & replicate to replica shard - final int numDocs = randomIntBetween(100, 200); + final int numDocs = randomIntBetween(10, 20); logger.info("--> Inserting documents {}", numDocs); for (int i = 0; i < numDocs; i++) { shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); @@ -242,13 +311,13 @@ public void testSegmentReplication_With_ReaderClosedConcurrently() throws Except */ public void testSegmentReplication_With_EngineClosedConcurrently() throws Exception { String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; - try (ReplicationGroup shards = createGroup(1, settings, mappings, new NRTReplicationEngineFactory())) { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), mappings, new NRTReplicationEngineFactory())) { shards.startAll(); IndexShard primaryShard = shards.getPrimary(); final IndexShard replicaShard = shards.getReplicas().get(0); // Step 1. Ingest numDocs documents - final int numDocs = randomIntBetween(100, 200); + final int numDocs = randomIntBetween(10, 20); logger.info("--> Inserting documents {}", numDocs); for (int i = 0; i < numDocs; i++) { shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); @@ -284,137 +353,9 @@ public void testSegmentReplication_With_EngineClosedConcurrently() throws Except } } - /** - * Verifies that commits on replica engine resulting from engine or reader close does not cleanup the temporary - * replication files from ongoing round of segment replication - */ - public void testTemporaryFilesNotCleanup() throws Exception { - String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; - try (ReplicationGroup shards = createGroup(1, settings, mappings, new NRTReplicationEngineFactory())) { - shards.startAll(); - IndexShard primaryShard = shards.getPrimary(); - final IndexShard replica = shards.getReplicas().get(0); - - // Step 1. 
Ingest numDocs documents, commit to create commit point on primary & replicate - final int numDocs = randomIntBetween(100, 200); - logger.info("--> Inserting documents {}", numDocs); - for (int i = 0; i < numDocs; i++) { - shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); - } - assertEqualTranslogOperations(shards, primaryShard); - primaryShard.flush(new FlushRequest().waitIfOngoing(true).force(true)); - replicateSegments(primaryShard, shards.getReplicas()); - shards.assertAllEqual(numDocs); - - // Step 2. Ingest numDocs documents again to create a new commit on primary - logger.info("--> Ingest {} docs again", numDocs); - for (int i = 0; i < numDocs; i++) { - shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); - } - assertEqualTranslogOperations(shards, primaryShard); - primaryShard.flush(new FlushRequest().waitIfOngoing(true).force(true)); - - // Step 3. Copy segment files to replica shard but prevent commit - final CountDownLatch countDownLatch = new CountDownLatch(1); - Map primaryMetadata; - try (final GatedCloseable segmentInfosSnapshot = primaryShard.getSegmentInfosSnapshot()) { - final SegmentInfos primarySegmentInfos = segmentInfosSnapshot.get(); - primaryMetadata = primaryShard.store().getSegmentMetadataMap(primarySegmentInfos); - } - final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); - final IndicesService indicesService = mock(IndicesService.class); - when(indicesService.getShardOrNull(replica.shardId)).thenReturn(replica); - final SegmentReplicationTargetService targetService = new SegmentReplicationTargetService( - threadPool, - new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), - mock(TransportService.class), - sourceFactory, - indicesService, - clusterService - ); - final Consumer runnablePostGetFiles = (indexShard) -> { - try { - Collection temporaryFiles = Stream.of(indexShard.store().directory().listAll()) - .filter(name -> name.startsWith(SegmentReplicationTarget.REPLICATION_PREFIX)) - .collect(Collectors.toList()); - - // Step 4. Perform a commit on replica shard. - NRTReplicationEngine engine = (NRTReplicationEngine) indexShard.getEngine(); - engine.updateSegments(engine.getSegmentInfosSnapshot().get()); - - // Step 5. Validate temporary files are not deleted from store. 
- Collection replicaStoreFiles = List.of(indexShard.store().directory().listAll()); - assertTrue(replicaStoreFiles.containsAll(temporaryFiles)); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - SegmentReplicationSource segmentReplicationSource = getSegmentReplicationSource( - primaryShard, - (repId) -> targetService.get(repId), - runnablePostGetFiles - ); - when(sourceFactory.get(any())).thenReturn(segmentReplicationSource); - targetService.startReplication(replica, getTargetListener(primaryShard, replica, primaryMetadata, countDownLatch)); - countDownLatch.await(30, TimeUnit.SECONDS); - assertEquals("Replication failed", 0, countDownLatch.getCount()); - shards.assertAllEqual(numDocs); - } - } - - public void testSegmentReplication_Index_Update_Delete() throws Exception { - String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; - try (ReplicationGroup shards = createGroup(2, settings, mappings, new NRTReplicationEngineFactory())) { - shards.startAll(); - final IndexShard primaryShard = shards.getPrimary(); - - final int numDocs = randomIntBetween(100, 200); - for (int i = 0; i < numDocs; i++) { - shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); - } - - assertEqualTranslogOperations(shards, primaryShard); - primaryShard.refresh("Test"); - replicateSegments(primaryShard, shards.getReplicas()); - - shards.assertAllEqual(numDocs); - - for (int i = 0; i < numDocs; i++) { - // randomly update docs. - if (randomBoolean()) { - shards.index( - new IndexRequest(index.getName()).id(String.valueOf(i)).source("{ \"foo\" : \"baz\" }", XContentType.JSON) - ); - } - } - assertEqualTranslogOperations(shards, primaryShard); - primaryShard.refresh("Test"); - replicateSegments(primaryShard, shards.getReplicas()); - shards.assertAllEqual(numDocs); - - final List docs = getDocIdAndSeqNos(primaryShard); - for (IndexShard shard : shards.getReplicas()) { - assertEquals(getDocIdAndSeqNos(shard), docs); - } - for (int i = 0; i < numDocs; i++) { - // randomly delete. 
- if (randomBoolean()) { - shards.delete(new DeleteRequest(index.getName()).id(String.valueOf(i))); - } - } - assertEqualTranslogOperations(shards, primaryShard); - primaryShard.refresh("Test"); - replicateSegments(primaryShard, shards.getReplicas()); - final List docsAfterDelete = getDocIdAndSeqNos(primaryShard); - for (IndexShard shard : shards.getReplicas()) { - assertEquals(getDocIdAndSeqNos(shard), docsAfterDelete); - } - } - } - public void testIgnoreShardIdle() throws Exception { Settings updatedSettings = Settings.builder() - .put(settings) + .put(getIndexSettings()) .put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO) .build(); try (ReplicationGroup shards = createGroup(1, updatedSettings, new NRTReplicationEngineFactory())) { @@ -464,7 +405,7 @@ public void testShardIdle_Docrep() throws Exception { public void testShardIdleWithNoReplicas() throws Exception { Settings updatedSettings = Settings.builder() - .put(settings) + .put(getIndexSettings()) .put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO) .build(); try (ReplicationGroup shards = createGroup(0, updatedSettings, new NRTReplicationEngineFactory())) { @@ -544,132 +485,10 @@ public void testRejectCheckpointOnShardRoutingPrimary() throws IOException { closeShards(primaryShard); } - public void testReplicaReceivesGenIncrease() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - final IndexShard primary = shards.getPrimary(); - final IndexShard replica = shards.getReplicas().get(0); - final int numDocs = randomIntBetween(10, 100); - shards.indexDocs(numDocs); - assertEquals(numDocs, primary.translogStats().estimatedNumberOfOperations()); - assertEquals(numDocs, replica.translogStats().estimatedNumberOfOperations()); - assertEquals(numDocs, primary.translogStats().getUncommittedOperations()); - assertEquals(numDocs, replica.translogStats().getUncommittedOperations()); - flushShard(primary, true); - replicateSegments(primary, shards.getReplicas()); - assertEquals(0, primary.translogStats().estimatedNumberOfOperations()); - assertEquals(0, replica.translogStats().estimatedNumberOfOperations()); - assertEquals(0, primary.translogStats().getUncommittedOperations()); - assertEquals(0, replica.translogStats().getUncommittedOperations()); - - final int additionalDocs = shards.indexDocs(randomIntBetween(numDocs + 1, numDocs + 10)); - - final int totalDocs = numDocs + additionalDocs; - primary.refresh("test"); - replicateSegments(primary, shards.getReplicas()); - assertEquals(additionalDocs, primary.translogStats().estimatedNumberOfOperations()); - assertEquals(additionalDocs, replica.translogStats().estimatedNumberOfOperations()); - assertEquals(additionalDocs, primary.translogStats().getUncommittedOperations()); - assertEquals(additionalDocs, replica.translogStats().getUncommittedOperations()); - flushShard(primary, true); - replicateSegments(primary, shards.getReplicas()); - - assertEqualCommittedSegments(primary, replica); - assertDocCount(primary, totalDocs); - assertDocCount(replica, totalDocs); - assertEquals(0, primary.translogStats().estimatedNumberOfOperations()); - assertEquals(0, replica.translogStats().estimatedNumberOfOperations()); - assertEquals(0, primary.translogStats().getUncommittedOperations()); - assertEquals(0, replica.translogStats().getUncommittedOperations()); - } - } - - public void testPrimaryRelocation() throws Exception { - final IndexShard primarySource = newStartedShard(true, settings); - 
int totalOps = randomInt(10); - for (int i = 0; i < totalOps; i++) { - indexDoc(primarySource, "_doc", Integer.toString(i)); - } - IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1)); - final IndexShard primaryTarget = newShard( - primarySource.routingEntry().getTargetRelocatingShard(), - settings, - new NRTReplicationEngineFactory() - ); - updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetadata()); - - Function, List> replicatePrimaryFunction = (shardList) -> { - try { - assert shardList.size() >= 2; - final IndexShard primary = shardList.get(0); - return replicateSegments(primary, shardList.subList(1, shardList.size())); - } catch (IOException | InterruptedException e) { - throw new RuntimeException(e); - } - }; - recoverReplica(primaryTarget, primarySource, true, replicatePrimaryFunction); - - // check that local checkpoint of new primary is properly tracked after primary relocation - assertThat(primaryTarget.getLocalCheckpoint(), equalTo(totalOps - 1L)); - assertThat( - primaryTarget.getReplicationTracker() - .getTrackedLocalCheckpointForShard(primaryTarget.routingEntry().allocationId().getId()) - .getLocalCheckpoint(), - equalTo(totalOps - 1L) - ); - assertDocCount(primaryTarget, totalOps); - closeShards(primarySource, primaryTarget); - } - - public void testPrimaryRelocationWithSegRepFailure() throws Exception { - final IndexShard primarySource = newStartedShard(true, settings); - int totalOps = randomInt(10); - for (int i = 0; i < totalOps; i++) { - indexDoc(primarySource, "_doc", Integer.toString(i)); - } - IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1)); - final IndexShard primaryTarget = newShard( - primarySource.routingEntry().getTargetRelocatingShard(), - settings, - new NRTReplicationEngineFactory() - ); - updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetadata()); - - Function, List> replicatePrimaryFunction = (shardList) -> { - try { - throw new IOException("Expected failure"); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - Exception e = expectThrows( - Exception.class, - () -> recoverReplica( - primaryTarget, - primarySource, - (primary, sourceNode) -> new RecoveryTarget(primary, sourceNode, new ReplicationListener() { - @Override - public void onDone(ReplicationState state) { - throw new AssertionError("recovery must fail"); - } - - @Override - public void onFailure(ReplicationState state, ReplicationFailedException e, boolean sendShardFailure) { - assertEquals(ExceptionsHelper.unwrap(e, IOException.class).getMessage(), "Expected failure"); - } - }), - true, - true, - replicatePrimaryFunction - ) - ); - closeShards(primarySource, primaryTarget); - } - // Todo: Remove this test when there is a better mechanism to test a functionality passing in different replication // strategy. 
public void testLockingBeforeAndAfterRelocated() throws Exception { - final IndexShard shard = newStartedShard(true, settings); + final IndexShard shard = newStartedShard(true, getIndexSettings()); final ShardRouting routing = ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"); IndexShardTestCase.updateRoutingEntry(shard, routing); CountDownLatch latch = new CountDownLatch(1); @@ -702,7 +521,7 @@ public void testLockingBeforeAndAfterRelocated() throws Exception { // Todo: Remove this test when there is a better mechanism to test a functionality passing in different replication // strategy. public void testDelayedOperationsBeforeAndAfterRelocated() throws Exception { - final IndexShard shard = newStartedShard(true, settings); + final IndexShard shard = newStartedShard(true, getIndexSettings()); final ShardRouting routing = ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"); IndexShardTestCase.updateRoutingEntry(shard, routing); final CountDownLatch startRecovery = new CountDownLatch(1); @@ -776,428 +595,8 @@ public void onFailure(Exception e) { closeShards(shard); } - public void testReplicaReceivesLowerGeneration() throws Exception { - // when a replica gets incoming segments that are lower than what it currently has on disk. - - // start 3 nodes Gens: P [2], R [2], R[2] - // index some docs and flush twice, push to only 1 replica. - // State Gens: P [4], R-1 [3], R-2 [2] - // Promote R-2 as the new primary and demote the old primary. - // State Gens: R[4], R-1 [3], P [4] - *commit on close of NRTEngine, xlog replayed and commit made. - // index docs on new primary and flush - // replicate to all. - // Expected result: State Gens: P[4], R-1 [4], R-2 [4] - try (ReplicationGroup shards = createGroup(2, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - final IndexShard primary = shards.getPrimary(); - final IndexShard replica_1 = shards.getReplicas().get(0); - final IndexShard replica_2 = shards.getReplicas().get(1); - int numDocs = randomIntBetween(10, 100); - shards.indexDocs(numDocs); - flushShard(primary, false); - replicateSegments(primary, List.of(replica_1)); - numDocs = randomIntBetween(numDocs + 1, numDocs + 10); - shards.indexDocs(numDocs); - flushShard(primary, false); - replicateSegments(primary, List.of(replica_1)); - - assertEqualCommittedSegments(primary, replica_1); - - shards.promoteReplicaToPrimary(replica_2).get(); - primary.close("demoted", false, false); - primary.store().close(); - IndexShard oldPrimary = shards.addReplicaWithExistingPath(primary.shardPath(), primary.routingEntry().currentNodeId()); - shards.recoverReplica(oldPrimary); - - numDocs = randomIntBetween(numDocs + 1, numDocs + 10); - shards.indexDocs(numDocs); - flushShard(replica_2, false); - replicateSegments(replica_2, shards.getReplicas()); - assertEqualCommittedSegments(replica_2, oldPrimary, replica_1); - } - } - - public void testReplicaRestarts() throws Exception { - try (ReplicationGroup shards = createGroup(3, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - IndexShard primary = shards.getPrimary(); - // 1. Create ops that are in the index and xlog of both shards but not yet part of a commit point. - final int numDocs = shards.indexDocs(randomInt(10)); - - // refresh and copy the segments over. - if (randomBoolean()) { - flushShard(primary); - } - primary.refresh("Test"); - replicateSegments(primary, shards.getReplicas()); - - // at this point both shards should have numDocs persisted and searchable. 
- assertDocCounts(primary, numDocs, numDocs); - for (IndexShard shard : shards.getReplicas()) { - assertDocCounts(shard, numDocs, numDocs); - } - - final int i1 = randomInt(5); - for (int i = 0; i < i1; i++) { - shards.indexDocs(randomInt(10)); - - // randomly resetart a replica - final IndexShard replicaToRestart = getRandomReplica(shards); - replicaToRestart.close("restart", false, false); - replicaToRestart.store().close(); - shards.removeReplica(replicaToRestart); - final IndexShard newReplica = shards.addReplicaWithExistingPath( - replicaToRestart.shardPath(), - replicaToRestart.routingEntry().currentNodeId() - ); - shards.recoverReplica(newReplica); - - // refresh and push segments to our other replicas. - if (randomBoolean()) { - failAndPromoteRandomReplica(shards); - } - flushShard(shards.getPrimary()); - replicateSegments(shards.getPrimary(), shards.getReplicas()); - } - primary = shards.getPrimary(); - - // refresh and push segments to our other replica. - flushShard(primary); - replicateSegments(primary, shards.getReplicas()); - - for (IndexShard shard : shards) { - assertConsistentHistoryBetweenTranslogAndLucene(shard); - } - final List docsAfterReplication = getDocIdAndSeqNos(shards.getPrimary()); - for (IndexShard shard : shards.getReplicas()) { - assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterReplication)); - } - } - } - - public void testNRTReplicaWithRemoteStorePromotedAsPrimaryRefreshRefresh() throws Exception { - testNRTReplicaWithRemoteStorePromotedAsPrimary(false, false); - } - - public void testNRTReplicaWithRemoteStorePromotedAsPrimaryRefreshCommit() throws Exception { - testNRTReplicaWithRemoteStorePromotedAsPrimary(false, true); - } - - public void testNRTReplicaWithRemoteStorePromotedAsPrimaryCommitRefresh() throws Exception { - testNRTReplicaWithRemoteStorePromotedAsPrimary(true, false); - } - - public void testNRTReplicaWithRemoteStorePromotedAsPrimaryCommitCommit() throws Exception { - testNRTReplicaWithRemoteStorePromotedAsPrimary(true, true); - } - - private void testNRTReplicaWithRemoteStorePromotedAsPrimary(boolean performFlushFirst, boolean performFlushSecond) throws Exception { - Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "temp-fs") - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, "temp-fs") - .build(); - - try (ReplicationGroup shards = createGroup(1, settings, indexMapping, new NRTReplicationEngineFactory(), createTempDir())) { - shards.startAll(); - IndexShard oldPrimary = shards.getPrimary(); - final IndexShard nextPrimary = shards.getReplicas().get(0); - - // 1. Create ops that are in the index and xlog of both shards but not yet part of a commit point. - final int numDocs = shards.indexDocs(randomInt(10)); - - // refresh but do not copy the segments over. - if (performFlushFirst) { - flushShard(oldPrimary, true); - } else { - oldPrimary.refresh("Test"); - } - // replicateSegments(primary, shards.getReplicas()); - - // at this point both shards should have numDocs persisted and searchable. - assertDocCounts(oldPrimary, numDocs, numDocs); - for (IndexShard shard : shards.getReplicas()) { - assertDocCounts(shard, numDocs, 0); - } - - // 2. Create ops that are in the replica's xlog, not in the index. - // index some more into both but don't replicate. 
replica will have only numDocs searchable, but should have totalDocs - // persisted. - final int additonalDocs = shards.indexDocs(randomInt(10)); - final int totalDocs = numDocs + additonalDocs; - - if (performFlushSecond) { - flushShard(oldPrimary, true); - } else { - oldPrimary.refresh("Test"); - } - assertDocCounts(oldPrimary, totalDocs, totalDocs); - for (IndexShard shard : shards.getReplicas()) { - assertDocCounts(shard, totalDocs, 0); - } - assertTrue(nextPrimary.translogStats().estimatedNumberOfOperations() >= additonalDocs); - assertTrue(nextPrimary.translogStats().getUncommittedOperations() >= additonalDocs); - - int prevOperationCount = nextPrimary.translogStats().estimatedNumberOfOperations(); - - // promote the replica - shards.promoteReplicaToPrimary(nextPrimary).get(); - - // close oldPrimary. - oldPrimary.close("demoted", false, false); - oldPrimary.store().close(); - - assertEquals(InternalEngine.class, nextPrimary.getEngine().getClass()); - assertDocCounts(nextPrimary, totalDocs, totalDocs); - - // As we are downloading segments from remote segment store on failover, there should not be - // any operations replayed from translog - assertEquals(prevOperationCount, nextPrimary.translogStats().estimatedNumberOfOperations()); - - // refresh and push segments to our other replica. - nextPrimary.refresh("test"); - - for (IndexShard shard : shards) { - assertConsistentHistoryBetweenTranslogAndLucene(shard); - } - final List docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary()); - for (IndexShard shard : shards.getReplicas()) { - assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); - } - } - } - - public void testNRTReplicaPromotedAsPrimary() throws Exception { - try (ReplicationGroup shards = createGroup(2, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - IndexShard oldPrimary = shards.getPrimary(); - final IndexShard nextPrimary = shards.getReplicas().get(0); - final IndexShard replica = shards.getReplicas().get(1); - - // 1. Create ops that are in the index and xlog of both shards but not yet part of a commit point. - final int numDocs = shards.indexDocs(randomInt(10)); - - // refresh and copy the segments over. - oldPrimary.refresh("Test"); - replicateSegments(oldPrimary, shards.getReplicas()); - - // at this point both shards should have numDocs persisted and searchable. - assertDocCounts(oldPrimary, numDocs, numDocs); - for (IndexShard shard : shards.getReplicas()) { - assertDocCounts(shard, numDocs, numDocs); - } - assertEqualTranslogOperations(shards, oldPrimary); - - // 2. Create ops that are in the replica's xlog, not in the index. - // index some more into both but don't replicate. replica will have only numDocs searchable, but should have totalDocs - // persisted. 
- final int additonalDocs = shards.indexDocs(randomInt(10)); - final int totalDocs = numDocs + additonalDocs; - - assertDocCounts(oldPrimary, totalDocs, totalDocs); - assertEqualTranslogOperations(shards, oldPrimary); - for (IndexShard shard : shards.getReplicas()) { - assertDocCounts(shard, totalDocs, numDocs); - } - assertEquals(totalDocs, oldPrimary.translogStats().estimatedNumberOfOperations()); - assertEquals(totalDocs, oldPrimary.translogStats().estimatedNumberOfOperations()); - assertEquals(totalDocs, nextPrimary.translogStats().estimatedNumberOfOperations()); - assertEquals(totalDocs, replica.translogStats().estimatedNumberOfOperations()); - assertEquals(totalDocs, nextPrimary.translogStats().getUncommittedOperations()); - assertEquals(totalDocs, replica.translogStats().getUncommittedOperations()); - - // promote the replica - shards.syncGlobalCheckpoint(); - shards.promoteReplicaToPrimary(nextPrimary); - - // close and start the oldPrimary as a replica. - oldPrimary.close("demoted", false, false); - oldPrimary.store().close(); - oldPrimary = shards.addReplicaWithExistingPath(oldPrimary.shardPath(), oldPrimary.routingEntry().currentNodeId()); - shards.recoverReplica(oldPrimary); - - assertEquals(NRTReplicationEngine.class, oldPrimary.getEngine().getClass()); - assertEquals(InternalEngine.class, nextPrimary.getEngine().getClass()); - assertDocCounts(nextPrimary, totalDocs, totalDocs); - assertEquals(0, nextPrimary.translogStats().estimatedNumberOfOperations()); - - // refresh and push segments to our other replica. - nextPrimary.refresh("test"); - replicateSegments(nextPrimary, asList(replica)); - - for (IndexShard shard : shards) { - assertConsistentHistoryBetweenTranslogAndLucene(shard); - } - final List docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary()); - for (IndexShard shard : shards.getReplicas()) { - assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); - } - } - } - - public void testReplicaPromotedWhileReplicating() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - final IndexShard oldPrimary = shards.getPrimary(); - final IndexShard nextPrimary = shards.getReplicas().get(0); - - final int numDocs = shards.indexDocs(randomInt(10)); - oldPrimary.refresh("Test"); - shards.syncGlobalCheckpoint(); - - final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); - final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); - SegmentReplicationSource source = new TestReplicationSource() { - @Override - public void getCheckpointMetadata( - long replicationId, - ReplicationCheckpoint checkpoint, - ActionListener listener - ) { - resolveCheckpointInfoResponseListener(listener, oldPrimary); - ShardRouting oldRouting = nextPrimary.shardRouting; - try { - shards.promoteReplicaToPrimary(nextPrimary); - } catch (IOException e) { - Assert.fail("Promotion should not fail"); - } - targetService.shardRoutingChanged(nextPrimary, oldRouting, nextPrimary.shardRouting); - } - - @Override - public void getSegmentFiles( - long replicationId, - ReplicationCheckpoint checkpoint, - List filesToFetch, - IndexShard indexShard, - ActionListener listener - ) { - listener.onResponse(new GetSegmentFilesResponse(Collections.emptyList())); - } - }; - when(sourceFactory.get(any())).thenReturn(source); - startReplicationAndAssertCancellation(nextPrimary, targetService); - // wait for replica to finish being 
promoted, and assert doc counts. - final CountDownLatch latch = new CountDownLatch(1); - nextPrimary.acquirePrimaryOperationPermit(new ActionListener<>() { - @Override - public void onResponse(Releasable releasable) { - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - throw new AssertionError(e); - } - }, ThreadPool.Names.GENERIC, ""); - latch.await(); - assertEquals(nextPrimary.getEngine().getClass(), InternalEngine.class); - nextPrimary.refresh("test"); - - oldPrimary.close("demoted", false, false); - oldPrimary.store().close(); - IndexShard newReplica = shards.addReplicaWithExistingPath(oldPrimary.shardPath(), oldPrimary.routingEntry().currentNodeId()); - shards.recoverReplica(newReplica); - - assertDocCount(nextPrimary, numDocs); - assertDocCount(newReplica, numDocs); - - nextPrimary.refresh("test"); - replicateSegments(nextPrimary, shards.getReplicas()); - final List docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary()); - for (IndexShard shard : shards.getReplicas()) { - assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); - } - } - } - - public void testReplicaClosesWhileReplicating_AfterGetCheckpoint() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - IndexShard primary = shards.getPrimary(); - final IndexShard replica = shards.getReplicas().get(0); - - final int numDocs = shards.indexDocs(randomInt(10)); - primary.refresh("Test"); - - final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); - final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); - SegmentReplicationSource source = new TestReplicationSource() { - @Override - public void getCheckpointMetadata( - long replicationId, - ReplicationCheckpoint checkpoint, - ActionListener listener - ) { - // trigger a cancellation by closing the replica. 
- targetService.beforeIndexShardClosed(replica.shardId, replica, Settings.EMPTY); - resolveCheckpointInfoResponseListener(listener, primary); - } - - @Override - public void getSegmentFiles( - long replicationId, - ReplicationCheckpoint checkpoint, - List filesToFetch, - IndexShard indexShard, - ActionListener listener - ) { - Assert.fail("Should not be reached"); - } - }; - when(sourceFactory.get(any())).thenReturn(source); - startReplicationAndAssertCancellation(replica, targetService); - - shards.removeReplica(replica); - closeShards(replica); - } - } - - public void testReplicaClosesWhileReplicating_AfterGetSegmentFiles() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - IndexShard primary = shards.getPrimary(); - final IndexShard replica = shards.getReplicas().get(0); - - final int numDocs = shards.indexDocs(randomInt(10)); - primary.refresh("Test"); - - final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); - final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); - SegmentReplicationSource source = new TestReplicationSource() { - @Override - public void getCheckpointMetadata( - long replicationId, - ReplicationCheckpoint checkpoint, - ActionListener listener - ) { - resolveCheckpointInfoResponseListener(listener, primary); - } - - @Override - public void getSegmentFiles( - long replicationId, - ReplicationCheckpoint checkpoint, - List filesToFetch, - IndexShard indexShard, - ActionListener listener - ) { - // randomly resolve the listener, indicating the source has resolved. - listener.onResponse(new GetSegmentFilesResponse(Collections.emptyList())); - targetService.beforeIndexShardClosed(replica.shardId, replica, Settings.EMPTY); - } - }; - when(sourceFactory.get(any())).thenReturn(source); - startReplicationAndAssertCancellation(replica, targetService); - - shards.removeReplica(replica); - closeShards(replica); - } - } - public void testCloseShardDuringFinalize() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { shards.startAll(); IndexShard primary = shards.getPrimary(); final IndexShard replica = shards.getReplicas().get(0); @@ -1211,60 +610,8 @@ public void testCloseShardDuringFinalize() throws Exception { } } - public void testCloseShardWhileGettingCheckpoint() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - IndexShard primary = shards.getPrimary(); - final IndexShard replica = shards.getReplicas().get(0); - - primary.refresh("Test"); - - final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); - final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); - SegmentReplicationSource source = new TestReplicationSource() { - - ActionListener listener; - - @Override - public void getCheckpointMetadata( - long replicationId, - ReplicationCheckpoint checkpoint, - ActionListener listener - ) { - // set the listener, we will only fail it once we cancel the source. - this.listener = listener; - // shard is closing while we are copying files. 
- targetService.beforeIndexShardClosed(replica.shardId, replica, Settings.EMPTY); - } - - @Override - public void getSegmentFiles( - long replicationId, - ReplicationCheckpoint checkpoint, - List filesToFetch, - IndexShard indexShard, - ActionListener listener - ) { - Assert.fail("Unreachable"); - } - - @Override - public void cancel() { - // simulate listener resolving, but only after we have issued a cancel from beforeIndexShardClosed . - final RuntimeException exception = new CancellableThreads.ExecutionCancelledException("retryable action was cancelled"); - listener.onFailure(exception); - } - }; - when(sourceFactory.get(any())).thenReturn(source); - startReplicationAndAssertCancellation(replica, targetService); - - shards.removeReplica(replica); - closeShards(replica); - } - } - public void testBeforeIndexShardClosedWhileCopyingFiles() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { shards.startAll(); IndexShard primary = shards.getPrimary(); final IndexShard replica = shards.getReplicas().get(0); @@ -1315,45 +662,7 @@ public void cancel() { } } - public void testPrimaryCancelsExecution() throws Exception { - try (ReplicationGroup shards = createGroup(1, settings, new NRTReplicationEngineFactory())) { - shards.startAll(); - IndexShard primary = shards.getPrimary(); - final IndexShard replica = shards.getReplicas().get(0); - - final int numDocs = shards.indexDocs(randomInt(10)); - primary.refresh("Test"); - - final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); - final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); - SegmentReplicationSource source = new TestReplicationSource() { - @Override - public void getCheckpointMetadata( - long replicationId, - ReplicationCheckpoint checkpoint, - ActionListener listener - ) { - listener.onFailure(new CancellableThreads.ExecutionCancelledException("Cancelled")); - } - - @Override - public void getSegmentFiles( - long replicationId, - ReplicationCheckpoint checkpoint, - List filesToFetch, - IndexShard indexShard, - ActionListener listener - ) {} - }; - when(sourceFactory.get(any())).thenReturn(source); - startReplicationAndAssertCancellation(replica, targetService); - - shards.removeReplica(replica); - closeShards(replica); - } - } - - private SegmentReplicationTargetService newTargetService(SegmentReplicationSourceFactory sourceFactory) { + protected SegmentReplicationTargetService newTargetService(SegmentReplicationSourceFactory sourceFactory) { return new SegmentReplicationTargetService( threadPool, new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), @@ -1368,14 +677,15 @@ private SegmentReplicationTargetService newTargetService(SegmentReplicationSourc * Assert persisted and searchable doc counts. This method should not be used while docs are concurrently indexed because * it asserts point in time seqNos are relative to the doc counts. 
*/ - private void assertDocCounts(IndexShard indexShard, int expectedPersistedDocCount, int expectedSearchableDocCount) throws IOException { + protected void assertDocCounts(IndexShard indexShard, int expectedPersistedDocCount, int expectedSearchableDocCount) + throws IOException { assertDocCount(indexShard, expectedSearchableDocCount); // assigned seqNos start at 0, so assert max & local seqNos are 1 less than our persisted doc count. assertEquals(expectedPersistedDocCount - 1, indexShard.seqNoStats().getMaxSeqNo()); assertEquals(expectedPersistedDocCount - 1, indexShard.seqNoStats().getLocalCheckpoint()); } - private void resolveCheckpointInfoResponseListener(ActionListener listener, IndexShard primary) { + protected void resolveCheckpointInfoResponseListener(ActionListener listener, IndexShard primary) { try { final CopyState copyState = new CopyState( ReplicationCheckpoint.empty(primary.shardId, primary.getLatestReplicationCheckpoint().getCodec()), @@ -1390,7 +700,7 @@ private void resolveCheckpointInfoResponseListener(ActionListener operations = new ArrayList<>(); Translog.Operation op; diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithNodeToNodeIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithNodeToNodeIndexShardTests.java new file mode 100644 index 0000000000000..69846fbbe1dd4 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithNodeToNodeIndexShardTests.java @@ -0,0 +1,697 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.shard; + +import org.apache.lucene.index.SegmentInfos; +import org.junit.Assert; +import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.indices.flush.FlushRequest; +import org.opensearch.action.delete.DeleteRequest; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.common.concurrent.GatedCloseable; +import org.opensearch.common.lease.Releasable; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.CancellableThreads; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.engine.DocIdSeqNoAndSource; +import org.opensearch.index.engine.InternalEngine; +import org.opensearch.index.engine.NRTReplicationEngine; +import org.opensearch.index.engine.NRTReplicationEngineFactory; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.index.replication.TestReplicationSource; +import org.opensearch.index.store.StoreFileMetadata; +import org.opensearch.indices.IndicesService; +import org.opensearch.indices.recovery.RecoverySettings; +import org.opensearch.indices.replication.CheckpointInfoResponse; +import org.opensearch.indices.replication.GetSegmentFilesResponse; +import org.opensearch.indices.replication.SegmentReplicationSource; +import org.opensearch.indices.replication.SegmentReplicationSourceFactory; +import org.opensearch.indices.replication.SegmentReplicationTarget; +import org.opensearch.indices.replication.SegmentReplicationTargetService; +import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import 
java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.Arrays.asList; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class SegmentReplicationWithNodeToNodeIndexShardTests extends SegmentReplicationIndexShardTests { + + public void testReplicaClosesWhileReplicating_AfterGetSegmentFiles() throws Exception { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + IndexShard primary = shards.getPrimary(); + final IndexShard replica = shards.getReplicas().get(0); + + final int numDocs = shards.indexDocs(randomInt(10)); + primary.refresh("Test"); + + final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); + final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); + SegmentReplicationSource source = new TestReplicationSource() { + @Override + public void getCheckpointMetadata( + long replicationId, + ReplicationCheckpoint checkpoint, + ActionListener listener + ) { + resolveCheckpointInfoResponseListener(listener, primary); + } + + @Override + public void getSegmentFiles( + long replicationId, + ReplicationCheckpoint checkpoint, + List filesToFetch, + IndexShard indexShard, + ActionListener listener + ) { + // randomly resolve the listener, indicating the source has resolved. + listener.onResponse(new GetSegmentFilesResponse(Collections.emptyList())); + targetService.beforeIndexShardClosed(replica.shardId, replica, Settings.EMPTY); + } + }; + when(sourceFactory.get(any())).thenReturn(source); + startReplicationAndAssertCancellation(replica, targetService); + + shards.removeReplica(replica); + closeShards(replica); + } + } + + public void testReplicaClosesWhileReplicating_AfterGetCheckpoint() throws Exception { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + IndexShard primary = shards.getPrimary(); + final IndexShard replica = shards.getReplicas().get(0); + + final int numDocs = shards.indexDocs(randomInt(10)); + primary.refresh("Test"); + + final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); + final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); + SegmentReplicationSource source = new TestReplicationSource() { + @Override + public void getCheckpointMetadata( + long replicationId, + ReplicationCheckpoint checkpoint, + ActionListener listener + ) { + // trigger a cancellation by closing the replica. 
+ targetService.beforeIndexShardClosed(replica.shardId, replica, Settings.EMPTY); + resolveCheckpointInfoResponseListener(listener, primary); + } + + @Override + public void getSegmentFiles( + long replicationId, + ReplicationCheckpoint checkpoint, + List filesToFetch, + IndexShard indexShard, + ActionListener listener + ) { + Assert.fail("Should not be reached"); + } + }; + when(sourceFactory.get(any())).thenReturn(source); + startReplicationAndAssertCancellation(replica, targetService); + + shards.removeReplica(replica); + closeShards(replica); + } + } + + public void testCloseShardWhileGettingCheckpoint() throws Exception { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + IndexShard primary = shards.getPrimary(); + final IndexShard replica = shards.getReplicas().get(0); + + primary.refresh("Test"); + + final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); + final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); + SegmentReplicationSource source = new TestReplicationSource() { + + ActionListener listener; + + @Override + public void getCheckpointMetadata( + long replicationId, + ReplicationCheckpoint checkpoint, + ActionListener listener + ) { + // set the listener, we will only fail it once we cancel the source. + this.listener = listener; + // shard is closing while we are copying files. + targetService.beforeIndexShardClosed(replica.shardId, replica, Settings.EMPTY); + } + + @Override + public void getSegmentFiles( + long replicationId, + ReplicationCheckpoint checkpoint, + List filesToFetch, + IndexShard indexShard, + ActionListener listener + ) { + Assert.fail("Unreachable"); + } + + @Override + public void cancel() { + // simulate listener resolving, but only after we have issued a cancel from beforeIndexShardClosed . 
+ final RuntimeException exception = new CancellableThreads.ExecutionCancelledException("retryable action was cancelled"); + listener.onFailure(exception); + } + }; + when(sourceFactory.get(any())).thenReturn(source); + startReplicationAndAssertCancellation(replica, targetService); + + shards.removeReplica(replica); + closeShards(replica); + } + } + + public void testPrimaryCancelsExecution() throws Exception { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + IndexShard primary = shards.getPrimary(); + final IndexShard replica = shards.getReplicas().get(0); + + final int numDocs = shards.indexDocs(randomInt(10)); + primary.refresh("Test"); + + final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); + final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); + SegmentReplicationSource source = new TestReplicationSource() { + @Override + public void getCheckpointMetadata( + long replicationId, + ReplicationCheckpoint checkpoint, + ActionListener listener + ) { + listener.onFailure(new CancellableThreads.ExecutionCancelledException("Cancelled")); + } + + @Override + public void getSegmentFiles( + long replicationId, + ReplicationCheckpoint checkpoint, + List filesToFetch, + IndexShard indexShard, + ActionListener listener + ) {} + }; + when(sourceFactory.get(any())).thenReturn(source); + startReplicationAndAssertCancellation(replica, targetService); + + shards.removeReplica(replica); + closeShards(replica); + } + } + + public void testReplicaPromotedWhileReplicating() throws Exception { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + final IndexShard oldPrimary = shards.getPrimary(); + final IndexShard nextPrimary = shards.getReplicas().get(0); + + final int numDocs = shards.indexDocs(randomInt(10)); + oldPrimary.refresh("Test"); + shards.syncGlobalCheckpoint(); + + final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); + final SegmentReplicationTargetService targetService = newTargetService(sourceFactory); + SegmentReplicationSource source = new TestReplicationSource() { + @Override + public void getCheckpointMetadata( + long replicationId, + ReplicationCheckpoint checkpoint, + ActionListener listener + ) { + resolveCheckpointInfoResponseListener(listener, oldPrimary); + ShardRouting oldRouting = nextPrimary.shardRouting; + try { + shards.promoteReplicaToPrimary(nextPrimary); + } catch (IOException e) { + Assert.fail("Promotion should not fail"); + } + targetService.shardRoutingChanged(nextPrimary, oldRouting, nextPrimary.shardRouting); + } + + @Override + public void getSegmentFiles( + long replicationId, + ReplicationCheckpoint checkpoint, + List filesToFetch, + IndexShard indexShard, + ActionListener listener + ) { + listener.onResponse(new GetSegmentFilesResponse(Collections.emptyList())); + } + }; + when(sourceFactory.get(any())).thenReturn(source); + startReplicationAndAssertCancellation(nextPrimary, targetService); + // wait for replica to finish being promoted, and assert doc counts. 
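+            // Descriptive note: acquiring a primary operation permit only succeeds once the promotion's operation block is
+            // released, so by the time the latch counts down the engine swap to InternalEngine should have completed.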
+ final CountDownLatch latch = new CountDownLatch(1); + nextPrimary.acquirePrimaryOperationPermit(new ActionListener<>() { + @Override + public void onResponse(Releasable releasable) { + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }, ThreadPool.Names.GENERIC, ""); + latch.await(); + assertEquals(nextPrimary.getEngine().getClass(), InternalEngine.class); + nextPrimary.refresh("test"); + + oldPrimary.close("demoted", false, false); + oldPrimary.store().close(); + IndexShard newReplica = shards.addReplicaWithExistingPath(oldPrimary.shardPath(), oldPrimary.routingEntry().currentNodeId()); + shards.recoverReplica(newReplica); + + assertDocCount(nextPrimary, numDocs); + assertDocCount(newReplica, numDocs); + + nextPrimary.refresh("test"); + replicateSegments(nextPrimary, shards.getReplicas()); + final List docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary()); + for (IndexShard shard : shards.getReplicas()) { + assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); + } + } + } + + public void testReplicaReceivesGenIncrease() throws Exception { + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + final IndexShard primary = shards.getPrimary(); + final IndexShard replica = shards.getReplicas().get(0); + final int numDocs = randomIntBetween(10, 100); + shards.indexDocs(numDocs); + assertEquals(numDocs, primary.translogStats().estimatedNumberOfOperations()); + assertEquals(numDocs, replica.translogStats().estimatedNumberOfOperations()); + assertEquals(numDocs, primary.translogStats().getUncommittedOperations()); + assertEquals(numDocs, replica.translogStats().getUncommittedOperations()); + flushShard(primary, true); + replicateSegments(primary, shards.getReplicas()); + assertEquals(0, primary.translogStats().estimatedNumberOfOperations()); + assertEquals(0, replica.translogStats().estimatedNumberOfOperations()); + assertEquals(0, primary.translogStats().getUncommittedOperations()); + assertEquals(0, replica.translogStats().getUncommittedOperations()); + + final int additionalDocs = shards.indexDocs(randomIntBetween(numDocs + 1, numDocs + 10)); + + final int totalDocs = numDocs + additionalDocs; + primary.refresh("test"); + replicateSegments(primary, shards.getReplicas()); + assertEquals(additionalDocs, primary.translogStats().estimatedNumberOfOperations()); + assertEquals(additionalDocs, replica.translogStats().estimatedNumberOfOperations()); + assertEquals(additionalDocs, primary.translogStats().getUncommittedOperations()); + assertEquals(additionalDocs, replica.translogStats().getUncommittedOperations()); + flushShard(primary, true); + replicateSegments(primary, shards.getReplicas()); + + assertEqualCommittedSegments(primary, replica); + assertDocCount(primary, totalDocs); + assertDocCount(replica, totalDocs); + assertEquals(0, primary.translogStats().estimatedNumberOfOperations()); + assertEquals(0, replica.translogStats().estimatedNumberOfOperations()); + assertEquals(0, primary.translogStats().getUncommittedOperations()); + assertEquals(0, replica.translogStats().getUncommittedOperations()); + } + } + + /** + * Verifies that commits on replica engine resulting from engine or reader close does not cleanup the temporary + * replication files from ongoing round of segment replication + * @throws Exception + */ + public void testTemporaryFilesNotCleanup() throws Exception { + String mappings = "{ \"" + 
MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; + try (ReplicationGroup shards = createGroup(1, getIndexSettings(), mappings, new NRTReplicationEngineFactory())) { + shards.startAll(); + IndexShard primaryShard = shards.getPrimary(); + final IndexShard replica = shards.getReplicas().get(0); + + // Step 1. Ingest numDocs documents, commit to create commit point on primary & replicate + final int numDocs = randomIntBetween(100, 200); + logger.info("--> Inserting documents {}", numDocs); + for (int i = 0; i < numDocs; i++) { + shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); + } + assertEqualTranslogOperations(shards, primaryShard); + primaryShard.flush(new FlushRequest().waitIfOngoing(true).force(true)); + replicateSegments(primaryShard, shards.getReplicas()); + shards.assertAllEqual(numDocs); + + // Step 2. Ingest numDocs documents again to create a new commit on primary + logger.info("--> Ingest {} docs again", numDocs); + for (int i = 0; i < numDocs; i++) { + shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); + } + assertEqualTranslogOperations(shards, primaryShard); + primaryShard.flush(new FlushRequest().waitIfOngoing(true).force(true)); + + // Step 3. Copy segment files to replica shard but prevent commit + final CountDownLatch countDownLatch = new CountDownLatch(1); + Map primaryMetadata; + try (final GatedCloseable segmentInfosSnapshot = primaryShard.getSegmentInfosSnapshot()) { + final SegmentInfos primarySegmentInfos = segmentInfosSnapshot.get(); + primaryMetadata = primaryShard.store().getSegmentMetadataMap(primarySegmentInfos); + } + final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); + final IndicesService indicesService = mock(IndicesService.class); + when(indicesService.getShardOrNull(replica.shardId)).thenReturn(replica); + final SegmentReplicationTargetService targetService = new SegmentReplicationTargetService( + threadPool, + new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), + mock(TransportService.class), + sourceFactory, + indicesService, + clusterService + ); + final Consumer runnablePostGetFiles = (indexShard) -> { + try { + Collection temporaryFiles = Stream.of(indexShard.store().directory().listAll()) + .filter(name -> name.startsWith(SegmentReplicationTarget.REPLICATION_PREFIX)) + .collect(Collectors.toList()); + + // Step 4. Perform a commit on replica shard. + NRTReplicationEngine engine = (NRTReplicationEngine) indexShard.getEngine(); + engine.updateSegments(engine.getSegmentInfosSnapshot().get()); + + // Step 5. Validate temporary files are not deleted from store. 
+ Collection replicaStoreFiles = List.of(indexShard.store().directory().listAll()); + assertTrue(replicaStoreFiles.containsAll(temporaryFiles)); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; + SegmentReplicationSource segmentReplicationSource = getSegmentReplicationSource( + primaryShard, + (repId) -> targetService.get(repId), + runnablePostGetFiles + ); + when(sourceFactory.get(any())).thenReturn(segmentReplicationSource); + targetService.startReplication(replica, getTargetListener(primaryShard, replica, primaryMetadata, countDownLatch)); + countDownLatch.await(30, TimeUnit.SECONDS); + assertEquals("Replication failed", 0, countDownLatch.getCount()); + shards.assertAllEqual(numDocs); + } + } + + // Todo: Move this test to SegmentReplicationIndexShardTests so that it runs for both node-node & remote store + public void testReplicaReceivesLowerGeneration() throws Exception { + // when a replica gets incoming segments that are lower than what it currently has on disk. + + // start 3 nodes Gens: P [2], R [2], R[2] + // index some docs and flush twice, push to only 1 replica. + // State Gens: P [4], R-1 [3], R-2 [2] + // Promote R-2 as the new primary and demote the old primary. + // State Gens: R[4], R-1 [3], P [4] - *commit on close of NRTEngine, xlog replayed and commit made. + // index docs on new primary and flush + // replicate to all. + // Expected result: State Gens: P[4], R-1 [4], R-2 [4] + try (ReplicationGroup shards = createGroup(2, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + final IndexShard primary = shards.getPrimary(); + final IndexShard replica_1 = shards.getReplicas().get(0); + final IndexShard replica_2 = shards.getReplicas().get(1); + int numDocs = randomIntBetween(10, 100); + shards.indexDocs(numDocs); + flushShard(primary, false); + replicateSegments(primary, List.of(replica_1)); + numDocs = randomIntBetween(numDocs + 1, numDocs + 10); + shards.indexDocs(numDocs); + flushShard(primary, false); + replicateSegments(primary, List.of(replica_1)); + + assertEqualCommittedSegments(primary, replica_1); + + shards.promoteReplicaToPrimary(replica_2).get(); + primary.close("demoted", false, false); + primary.store().close(); + IndexShard oldPrimary = shards.addReplicaWithExistingPath(primary.shardPath(), primary.routingEntry().currentNodeId()); + shards.recoverReplica(oldPrimary); + + numDocs = randomIntBetween(numDocs + 1, numDocs + 10); + shards.indexDocs(numDocs); + flushShard(replica_2, false); + replicateSegments(replica_2, shards.getReplicas()); + assertEqualCommittedSegments(replica_2, oldPrimary, replica_1); + } + } + + // Todo: Move this test to SegmentReplicationIndexShardTests so that it runs for both node-node & remote store + public void testPrimaryRelocation() throws Exception { + final IndexShard primarySource = newStartedShard(true, getIndexSettings()); + int totalOps = randomInt(10); + for (int i = 0; i < totalOps; i++) { + indexDoc(primarySource, "_doc", Integer.toString(i)); + } + IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1)); + final IndexShard primaryTarget = newShard( + primarySource.routingEntry().getTargetRelocatingShard(), + getIndexSettings(), + new NRTReplicationEngineFactory() + ); + updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetadata()); + + Function, List> replicatePrimaryFunction = (shardList) -> { + try { + assert shardList.size() >= 2; + final IndexShard primary = shardList.get(0); + return 
replicateSegments(primary, shardList.subList(1, shardList.size())); + } catch (IOException | InterruptedException e) { + throw new RuntimeException(e); + } + }; + recoverReplica(primaryTarget, primarySource, true, replicatePrimaryFunction); + + // check that local checkpoint of new primary is properly tracked after primary relocation + assertThat(primaryTarget.getLocalCheckpoint(), equalTo(totalOps - 1L)); + assertThat( + primaryTarget.getReplicationTracker() + .getTrackedLocalCheckpointForShard(primaryTarget.routingEntry().allocationId().getId()) + .getLocalCheckpoint(), + equalTo(totalOps - 1L) + ); + assertDocCount(primaryTarget, totalOps); + closeShards(primarySource, primaryTarget); + } + + // Todo: Move this test to SegmentReplicationIndexShardTests so that it runs for both node-node & remote store + public void testNRTReplicaPromotedAsPrimary() throws Exception { + try (ReplicationGroup shards = createGroup(2, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + IndexShard oldPrimary = shards.getPrimary(); + final IndexShard nextPrimary = shards.getReplicas().get(0); + final IndexShard replica = shards.getReplicas().get(1); + + // 1. Create ops that are in the index and xlog of both shards but not yet part of a commit point. + final int numDocs = shards.indexDocs(randomInt(10)); + + // refresh and copy the segments over. + oldPrimary.refresh("Test"); + replicateSegments(oldPrimary, shards.getReplicas()); + + // at this point both shards should have numDocs persisted and searchable. + assertDocCounts(oldPrimary, numDocs, numDocs); + for (IndexShard shard : shards.getReplicas()) { + assertDocCounts(shard, numDocs, numDocs); + } + assertEqualTranslogOperations(shards, oldPrimary); + + // 2. Create ops that are in the replica's xlog, not in the index. + // index some more into both but don't replicate. replica will have only numDocs searchable, but should have totalDocs + // persisted. + final int additonalDocs = shards.indexDocs(randomInt(10)); + final int totalDocs = numDocs + additonalDocs; + + assertDocCounts(oldPrimary, totalDocs, totalDocs); + assertEqualTranslogOperations(shards, oldPrimary); + for (IndexShard shard : shards.getReplicas()) { + assertDocCounts(shard, totalDocs, numDocs); + } + assertEquals(totalDocs, oldPrimary.translogStats().estimatedNumberOfOperations()); + assertEquals(totalDocs, oldPrimary.translogStats().estimatedNumberOfOperations()); + assertEquals(totalDocs, nextPrimary.translogStats().estimatedNumberOfOperations()); + assertEquals(totalDocs, replica.translogStats().estimatedNumberOfOperations()); + assertEquals(totalDocs, nextPrimary.translogStats().getUncommittedOperations()); + assertEquals(totalDocs, replica.translogStats().getUncommittedOperations()); + + // promote the replica + shards.syncGlobalCheckpoint(); + shards.promoteReplicaToPrimary(nextPrimary); + + // close and start the oldPrimary as a replica. + oldPrimary.close("demoted", false, false); + oldPrimary.store().close(); + oldPrimary = shards.addReplicaWithExistingPath(oldPrimary.shardPath(), oldPrimary.routingEntry().currentNodeId()); + shards.recoverReplica(oldPrimary); + + assertEquals(NRTReplicationEngine.class, oldPrimary.getEngine().getClass()); + assertEquals(InternalEngine.class, nextPrimary.getEngine().getClass()); + assertDocCounts(nextPrimary, totalDocs, totalDocs); + assertEquals(0, nextPrimary.translogStats().estimatedNumberOfOperations()); + + // refresh and push segments to our other replica. 
+ nextPrimary.refresh("test"); + replicateSegments(nextPrimary, asList(replica)); + + for (IndexShard shard : shards) { + assertConsistentHistoryBetweenTranslogAndLucene(shard); + } + final List docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary()); + for (IndexShard shard : shards.getReplicas()) { + assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); + } + } + } + + // Todo: Move this test to SegmentReplicationIndexShardTests so that it runs for both node-node & remote store + public void testReplicaRestarts() throws Exception { + try (ReplicationGroup shards = createGroup(3, getIndexSettings(), new NRTReplicationEngineFactory())) { + shards.startAll(); + IndexShard primary = shards.getPrimary(); + // 1. Create ops that are in the index and xlog of both shards but not yet part of a commit point. + final int numDocs = shards.indexDocs(randomInt(10)); + logger.info("--> Index {} documents on primary", numDocs); + + // refresh and copy the segments over. + if (randomBoolean()) { + flushShard(primary); + } + primary.refresh("Test"); + logger.info("--> Replicate segments"); + replicateSegments(primary, shards.getReplicas()); + + // at this point both shards should have numDocs persisted and searchable. + logger.info("--> Verify doc count"); + assertDocCounts(primary, numDocs, numDocs); + for (IndexShard shard : shards.getReplicas()) { + assertDocCounts(shard, numDocs, numDocs); + } + + final int i1 = randomInt(5); + logger.info("--> Index {} more docs", i1); + for (int i = 0; i < i1; i++) { + shards.indexDocs(randomInt(10)); + + // randomly restart a replica + final IndexShard replicaToRestart = getRandomReplica(shards); + logger.info("--> Restarting replica {}", replicaToRestart.shardId); + replicaToRestart.close("restart", false, false); + replicaToRestart.store().close(); + shards.removeReplica(replicaToRestart); + final IndexShard newReplica = shards.addReplicaWithExistingPath( + replicaToRestart.shardPath(), + replicaToRestart.routingEntry().currentNodeId() + ); + logger.info("--> Recover newReplica {}", newReplica.shardId); + shards.recoverReplica(newReplica); + + // refresh and push segments to our other replicas. + if (randomBoolean()) { + failAndPromoteRandomReplica(shards); + } + flushShard(shards.getPrimary()); + replicateSegments(shards.getPrimary(), shards.getReplicas()); + } + primary = shards.getPrimary(); + + // refresh and push segments to our other replica. 
+ flushShard(primary); + replicateSegments(primary, shards.getReplicas()); + + for (IndexShard shard : shards) { + assertConsistentHistoryBetweenTranslogAndLucene(shard); + } + final List docsAfterReplication = getDocIdAndSeqNos(shards.getPrimary()); + for (IndexShard shard : shards.getReplicas()) { + assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterReplication)); + } + } + } + + // Todo: Move this test to SegmentReplicationIndexShardTests so that it runs for both node-node & remote store + public void testSegmentReplication_Index_Update_Delete() throws Exception { + String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"foo\": { \"type\": \"keyword\"} }}}"; + try (ReplicationGroup shards = createGroup(2, getIndexSettings(), mappings, new NRTReplicationEngineFactory())) { + shards.startAll(); + final IndexShard primaryShard = shards.getPrimary(); + + final int numDocs = randomIntBetween(100, 200); + for (int i = 0; i < numDocs; i++) { + shards.index(new IndexRequest(index.getName()).id(String.valueOf(i)).source("{\"foo\": \"bar\"}", XContentType.JSON)); + } + + assertEqualTranslogOperations(shards, primaryShard); + primaryShard.refresh("Test"); + replicateSegments(primaryShard, shards.getReplicas()); + + shards.assertAllEqual(numDocs); + + for (int i = 0; i < numDocs; i++) { + // randomly update docs. + if (randomBoolean()) { + shards.index( + new IndexRequest(index.getName()).id(String.valueOf(i)).source("{ \"foo\" : \"baz\" }", XContentType.JSON) + ); + } + } + assertEqualTranslogOperations(shards, primaryShard); + primaryShard.refresh("Test"); + replicateSegments(primaryShard, shards.getReplicas()); + shards.assertAllEqual(numDocs); + + final List docs = getDocIdAndSeqNos(primaryShard); + for (IndexShard shard : shards.getReplicas()) { + assertEquals(getDocIdAndSeqNos(shard), docs); + } + for (int i = 0; i < numDocs; i++) { + // randomly delete. 
+ if (randomBoolean()) { + shards.delete(new DeleteRequest(index.getName()).id(String.valueOf(i))); + } + } + assertEqualTranslogOperations(shards, primaryShard); + primaryShard.refresh("Test"); + replicateSegments(primaryShard, shards.getReplicas()); + final List docsAfterDelete = getDocIdAndSeqNos(primaryShard); + for (IndexShard shard : shards.getReplicas()) { + assertEquals(getDocIdAndSeqNos(shard), docsAfterDelete); + } + } + } + +} diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithRemoteIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithRemoteIndexShardTests.java index a67b60d6128d1..b15d8b66fca55 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithRemoteIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithRemoteIndexShardTests.java @@ -8,36 +8,131 @@ package org.opensearch.index.shard; +import org.junit.Before; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.index.engine.DocIdSeqNoAndSource; +import org.opensearch.index.engine.InternalEngine; import org.opensearch.index.engine.NRTReplicationEngineFactory; -import org.opensearch.index.replication.OpenSearchIndexLevelReplicationTestCase; import org.opensearch.indices.replication.common.ReplicationType; import java.io.IOException; +import java.util.List; -public class SegmentReplicationWithRemoteIndexShardTests extends OpenSearchIndexLevelReplicationTestCase { +import static org.hamcrest.Matchers.equalTo; + +public class SegmentReplicationWithRemoteIndexShardTests extends SegmentReplicationIndexShardTests { + + private static final String REPOSITORY_NAME = "temp-fs"; private static final Settings settings = Settings.builder() .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "temp-fs") - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, "temp-fs") + .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, REPOSITORY_NAME) + .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, REPOSITORY_NAME) .build(); - public void testReplicaSyncingFromRemoteStore() throws IOException { - ReplicationGroup shards = createGroup(1, settings, indexMapping, new NRTReplicationEngineFactory(), createTempDir()); - final IndexShard primaryShard = shards.getPrimary(); - final IndexShard replicaShard = shards.getReplicas().get(0); - shards.startPrimary(); - shards.startAll(); - indexDoc(primaryShard, "_doc", "1"); - indexDoc(primaryShard, "_doc", "2"); - primaryShard.refresh("test"); - assertDocs(primaryShard, "1", "2"); - flushShard(primaryShard); - - replicaShard.syncSegmentsFromRemoteSegmentStore(true, true, false); - assertDocs(replicaShard, "1", "2"); - closeShards(primaryShard, replicaShard); + @Before + public void setup() { + // Todo: Remove feature flag once remote store integration with segrep goes GA + FeatureFlags.initializeFeatureFlags( + Settings.builder().put(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL_SETTING.getKey(), "true").build() + ); + } + + protected Settings getIndexSettings() { + return settings; + } + + protected ReplicationGroup getReplicationGroup(int numberOfReplicas) throws IOException { + return createGroup(numberOfReplicas, settings, indexMapping, new NRTReplicationEngineFactory(), createTempDir()); + 
} + + public void testNRTReplicaWithRemoteStorePromotedAsPrimaryRefreshRefresh() throws Exception { + testNRTReplicaWithRemoteStorePromotedAsPrimary(false, false); + } + + public void testNRTReplicaWithRemoteStorePromotedAsPrimaryRefreshCommit() throws Exception { + testNRTReplicaWithRemoteStorePromotedAsPrimary(false, true); + } + + public void testNRTReplicaWithRemoteStorePromotedAsPrimaryCommitRefresh() throws Exception { + testNRTReplicaWithRemoteStorePromotedAsPrimary(true, false); + } + + public void testNRTReplicaWithRemoteStorePromotedAsPrimaryCommitCommit() throws Exception { + testNRTReplicaWithRemoteStorePromotedAsPrimary(true, true); + } + + public void testNRTReplicaWithRemoteStorePromotedAsPrimary(boolean performFlushFirst, boolean performFlushSecond) throws Exception { + try ( + ReplicationGroup shards = createGroup(1, getIndexSettings(), indexMapping, new NRTReplicationEngineFactory(), createTempDir()) + ) { + shards.startAll(); + IndexShard oldPrimary = shards.getPrimary(); + final IndexShard nextPrimary = shards.getReplicas().get(0); + + // 1. Create ops that are in the index and xlog of both shards but not yet part of a commit point. + final int numDocs = shards.indexDocs(randomInt(10)); + + // refresh but do not copy the segments over. + if (performFlushFirst) { + flushShard(oldPrimary, true); + } else { + oldPrimary.refresh("Test"); + } + // replicateSegments(primary, shards.getReplicas()); + + // at this point both shards should have numDocs persisted and searchable. + assertDocCounts(oldPrimary, numDocs, numDocs); + for (IndexShard shard : shards.getReplicas()) { + assertDocCounts(shard, numDocs, 0); + } + + // 2. Create ops that are in the replica's xlog, not in the index. + // index some more into both but don't replicate. replica will have only numDocs searchable, but should have totalDocs + // persisted. + final int additonalDocs = shards.indexDocs(randomInt(10)); + final int totalDocs = numDocs + additonalDocs; + + if (performFlushSecond) { + flushShard(oldPrimary, true); + } else { + oldPrimary.refresh("Test"); + } + assertDocCounts(oldPrimary, totalDocs, totalDocs); + for (IndexShard shard : shards.getReplicas()) { + assertDocCounts(shard, totalDocs, 0); + } + assertTrue(nextPrimary.translogStats().estimatedNumberOfOperations() >= additonalDocs); + assertTrue(nextPrimary.translogStats().getUncommittedOperations() >= additonalDocs); + + int prevOperationCount = nextPrimary.translogStats().estimatedNumberOfOperations(); + + // promote the replica + shards.promoteReplicaToPrimary(nextPrimary).get(); + + // close oldPrimary. + oldPrimary.close("demoted", false, false); + oldPrimary.store().close(); + + assertEquals(InternalEngine.class, nextPrimary.getEngine().getClass()); + assertDocCounts(nextPrimary, totalDocs, totalDocs); + + // As we are downloading segments from remote segment store on failover, there should not be + // any operations replayed from translog + assertEquals(prevOperationCount, nextPrimary.translogStats().estimatedNumberOfOperations()); + + // refresh and push segments to our other replica. 
+ nextPrimary.refresh("test"); + + for (IndexShard shard : shards) { + assertConsistentHistoryBetweenTranslogAndLucene(shard); + } + final List docsAfterRecovery = getDocIdAndSeqNos(shards.getPrimary()); + for (IndexShard shard : shards.getReplicas()) { + assertThat(shard.routingEntry().toString(), getDocIdAndSeqNos(shard), equalTo(docsAfterRecovery)); + } + } } } diff --git a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java index 7c765cf5df0be..3b2e33388925a 100644 --- a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java +++ b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java @@ -43,6 +43,8 @@ import org.opensearch.index.store.lockmanager.RemoteStoreMetadataLockManager; import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadata; import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadataHandler; +import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.threadpool.ThreadPool; import java.io.IOException; @@ -55,8 +57,6 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.HashMap; -import java.util.Collection; import java.util.concurrent.ExecutorService; import static org.mockito.Mockito.mock; @@ -101,7 +101,10 @@ public void setup() throws IOException { ); testUploadTracker = new TestUploadListener(); - Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, org.opensearch.Version.CURRENT).build(); + Settings indexSettings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, org.opensearch.Version.CURRENT) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .build(); ExecutorService executorService = OpenSearchExecutors.newDirectExecutorService(); indexShard = newStartedShard(false, indexSettings, new NRTReplicationEngineFactory()); @@ -260,7 +263,7 @@ private Map getDummyMetadata(String prefix, int commitGeneration * @return ByteArrayIndexInput: metadata file bytes with header and footer * @throws IOException IOException */ - private ByteArrayIndexInput createMetadataFileBytes(Map segmentFilesMap, long generation, long primaryTerm) + private ByteArrayIndexInput createMetadataFileBytes(Map segmentFilesMap, ReplicationCheckpoint replicationCheckpoint) throws IOException { ByteBuffersDataOutput byteBuffersIndexOutput = new ByteBuffersDataOutput(); segmentInfos.write(new ByteBuffersIndexOutput(byteBuffersIndexOutput, "", "")); @@ -270,8 +273,7 @@ private ByteArrayIndexInput createMetadataFileBytes(Map segmentF OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput("segment metadata", "metadata output stream", output, 4096); CodecUtil.writeHeader(indexOutput, RemoteSegmentMetadata.METADATA_CODEC, RemoteSegmentMetadata.CURRENT_VERSION); indexOutput.writeMapOfStrings(segmentFilesMap); - indexOutput.writeLong(generation); - indexOutput.writeLong(primaryTerm); + RemoteSegmentMetadata.writeCheckpointToIndexOutput(replicationCheckpoint, indexOutput); indexOutput.writeLong(byteArray.length); indexOutput.writeBytes(byteArray, byteArray.length); CodecUtil.writeFooter(indexOutput); @@ -309,13 +311,13 @@ private Map> populateMetadata() throws IOException { ); when(remoteMetadataDirectory.openInput(metadataFilename, 
IOContext.DEFAULT)).thenAnswer( - I -> createMetadataFileBytes(metadataFilenameContentMapping.get(metadataFilename), 23, 12) + I -> createMetadataFileBytes(metadataFilenameContentMapping.get(metadataFilename), indexShard.getLatestReplicationCheckpoint()) ); when(remoteMetadataDirectory.openInput(metadataFilename2, IOContext.DEFAULT)).thenAnswer( - I -> createMetadataFileBytes(metadataFilenameContentMapping.get(metadataFilename2), 13, 12) + I -> createMetadataFileBytes(metadataFilenameContentMapping.get(metadataFilename2), indexShard.getLatestReplicationCheckpoint()) ); when(remoteMetadataDirectory.openInput(metadataFilename3, IOContext.DEFAULT)).thenAnswer( - I -> createMetadataFileBytes(metadataFilenameContentMapping.get(metadataFilename3), 38, 10) + I -> createMetadataFileBytes(metadataFilenameContentMapping.get(metadataFilename3), indexShard.getLatestReplicationCheckpoint()) ); return metadataFilenameContentMapping; @@ -651,7 +653,9 @@ public void testContainsFile() throws IOException { metadata.put("_0.cfe", "_0.cfe::_0.cfe__" + UUIDs.base64UUID() + "::1234::512::" + Version.LATEST.major); metadata.put("_0.cfs", "_0.cfs::_0.cfs__" + UUIDs.base64UUID() + "::2345::1024::" + Version.LATEST.major); - when(remoteMetadataDirectory.openInput(metadataFilename, IOContext.DEFAULT)).thenReturn(createMetadataFileBytes(metadata, 1, 5)); + when(remoteMetadataDirectory.openInput(metadataFilename, IOContext.DEFAULT)).thenReturn( + createMetadataFileBytes(metadata, indexShard.getLatestReplicationCheckpoint()) + ); remoteSegmentStoreDirectory.init(); @@ -676,12 +680,19 @@ public void testContainsFile() throws IOException { public void testUploadMetadataEmpty() throws IOException { Directory storeDirectory = mock(Directory.class); IndexOutput indexOutput = mock(IndexOutput.class); - when(storeDirectory.createOutput(startsWith("metadata__12__o"), eq(IOContext.DEFAULT))).thenReturn(indexOutput); + final long primaryTerm = indexShard.getOperationPrimaryTerm(); + when(storeDirectory.createOutput(startsWith("metadata__" + primaryTerm + "__o"), eq(IOContext.DEFAULT))).thenReturn(indexOutput); Collection segmentFiles = List.of("_s1.si", "_s1.cfe", "_s3.cfs"); assertThrows( NoSuchFileException.class, - () -> remoteSegmentStoreDirectory.uploadMetadata(segmentFiles, segmentInfos, storeDirectory, 12L, 34L) + () -> remoteSegmentStoreDirectory.uploadMetadata( + segmentFiles, + segmentInfos, + storeDirectory, + 34L, + indexShard.getLatestReplicationCheckpoint() + ) ); } @@ -689,7 +700,7 @@ public void testUploadMetadataNonEmpty() throws IOException { indexDocs(142364, 5); flushShard(indexShard, true); SegmentInfos segInfos = indexShard.store().readLastCommittedSegmentsInfo(); - long primaryTerm = 12; + long primaryTerm = indexShard.getLatestReplicationCheckpoint().getPrimaryTerm(); String primaryTermLong = RemoteStoreUtils.invertLong(primaryTerm); long generation = segInfos.getGeneration(); String generationLong = RemoteStoreUtils.invertLong(generation); @@ -706,7 +717,7 @@ public void testUploadMetadataNonEmpty() throws IOException { getDummyMetadata("_0", (int) generation) ); when(remoteMetadataDirectory.openInput(latestMetadataFileName, IOContext.DEFAULT)).thenReturn( - createMetadataFileBytes(metadataFilenameContentMapping.get(latestMetadataFileName), generation, primaryTerm) + createMetadataFileBytes(metadataFilenameContentMapping.get(latestMetadataFileName), indexShard.getLatestReplicationCheckpoint()) ); remoteSegmentStoreDirectory.init(); @@ -717,7 +728,13 @@ public void testUploadMetadataNonEmpty() throws 
IOException { when(storeDirectory.createOutput(startsWith("metadata__" + primaryTermLong + "__" + generationLong), eq(IOContext.DEFAULT))) .thenReturn(indexOutput); - remoteSegmentStoreDirectory.uploadMetadata(segInfos.files(true), segInfos, storeDirectory, primaryTerm, generation); + remoteSegmentStoreDirectory.uploadMetadata( + segInfos.files(true), + segInfos, + storeDirectory, + generation, + indexShard.getLatestReplicationCheckpoint() + ); verify(remoteMetadataDirectory).copyFrom( eq(storeDirectory), diff --git a/server/src/test/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadataHandlerTests.java b/server/src/test/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadataHandlerTests.java index 2fee77ab563c0..d0136f04afd75 100644 --- a/server/src/test/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadataHandlerTests.java +++ b/server/src/test/java/org/opensearch/index/store/remote/metadata/RemoteSegmentMetadataHandlerTests.java @@ -25,6 +25,8 @@ import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.IndexShardTestCase; import org.opensearch.index.store.Store; +import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.indices.replication.common.ReplicationType; import java.io.IOException; import java.util.HashMap; @@ -38,16 +40,23 @@ public class RemoteSegmentMetadataHandlerTests extends IndexShardTestCase { private IndexShard indexShard; private SegmentInfos segmentInfos; + private ReplicationCheckpoint replicationCheckpoint; + @Before public void setup() throws IOException { remoteSegmentMetadataHandler = new RemoteSegmentMetadataHandler(); - Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, org.opensearch.Version.CURRENT).build(); + Settings indexSettings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, org.opensearch.Version.CURRENT) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) + .build(); indexShard = newStartedShard(false, indexSettings, new NRTReplicationEngineFactory()); try (Store store = indexShard.store()) { segmentInfos = store.readLastCommittedSegmentsInfo(); } + replicationCheckpoint = indexShard.getLatestReplicationCheckpoint(); } @After @@ -61,8 +70,7 @@ public void testReadContentNoSegmentInfos() throws IOException { OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput("dummy bytes", "dummy stream", output, 4096); Map expectedOutput = getDummyData(); indexOutput.writeMapOfStrings(expectedOutput); - indexOutput.writeLong(1234); - indexOutput.writeLong(1234); + RemoteSegmentMetadata.writeCheckpointToIndexOutput(replicationCheckpoint, indexOutput); indexOutput.writeLong(0); indexOutput.writeBytes(new byte[0], 0); indexOutput.close(); @@ -70,7 +78,7 @@ public void testReadContentNoSegmentInfos() throws IOException { new ByteArrayIndexInput("dummy bytes", BytesReference.toBytes(output.bytes())) ); assertEquals(expectedOutput, metadata.toMapOfStrings()); - assertEquals(1234, metadata.getGeneration()); + assertEquals(replicationCheckpoint.getSegmentsGen(), metadata.getGeneration()); } public void testReadContentWithSegmentInfos() throws IOException { @@ -78,8 +86,7 @@ public void testReadContentWithSegmentInfos() throws IOException { OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput("dummy bytes", "dummy stream", output, 4096); Map expectedOutput = getDummyData(); 
indexOutput.writeMapOfStrings(expectedOutput); - indexOutput.writeLong(1234); - indexOutput.writeLong(1234); + RemoteSegmentMetadata.writeCheckpointToIndexOutput(replicationCheckpoint, indexOutput); ByteBuffersIndexOutput segmentInfosOutput = new ByteBuffersIndexOutput(new ByteBuffersDataOutput(), "test", "resource"); segmentInfos.write(segmentInfosOutput); byte[] segmentInfosBytes = segmentInfosOutput.toArrayCopy(); @@ -90,7 +97,7 @@ public void testReadContentWithSegmentInfos() throws IOException { new ByteArrayIndexInput("dummy bytes", BytesReference.toBytes(output.bytes())) ); assertEquals(expectedOutput, metadata.toMapOfStrings()); - assertEquals(1234, metadata.getGeneration()); + assertEquals(replicationCheckpoint.getSegmentsGen(), metadata.getGeneration()); assertArrayEquals(segmentInfosBytes, metadata.getSegmentInfosBytes()); } @@ -106,8 +113,7 @@ public void testWriteContent() throws IOException { RemoteSegmentMetadata remoteSegmentMetadata = new RemoteSegmentMetadata( RemoteSegmentMetadata.fromMapOfStrings(expectedOutput), segmentInfosBytes, - 1234, - 1234 + indexShard.getLatestReplicationCheckpoint() ); remoteSegmentMetadataHandler.writeContent(indexOutput, remoteSegmentMetadata); indexOutput.close(); @@ -116,8 +122,8 @@ public void testWriteContent() throws IOException { new ByteArrayIndexInput("dummy bytes", BytesReference.toBytes(output.bytes())) ); assertEquals(expectedOutput, metadata.toMapOfStrings()); - assertEquals(1234, metadata.getGeneration()); - assertEquals(1234, metadata.getPrimaryTerm()); + assertEquals(replicationCheckpoint.getSegmentsGen(), metadata.getGeneration()); + assertEquals(replicationCheckpoint.getPrimaryTerm(), metadata.getPrimaryTerm()); assertArrayEquals(segmentInfosBytes, metadata.getSegmentInfosBytes()); } diff --git a/server/src/test/java/org/opensearch/indices/recovery/RemoteStorePeerRecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RemoteStorePeerRecoverySourceHandlerTests.java index 8135d9cd3718e..40182a85608ea 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RemoteStorePeerRecoverySourceHandlerTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RemoteStorePeerRecoverySourceHandlerTests.java @@ -8,15 +8,20 @@ package org.opensearch.indices.recovery; +import org.junit.Before; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexSettings; import org.opensearch.index.engine.NRTReplicationEngineFactory; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.replication.OpenSearchIndexLevelReplicationTestCase; import org.opensearch.index.seqno.ReplicationTracker; import org.opensearch.index.shard.IndexShard; import org.opensearch.indices.replication.common.ReplicationType; +import java.nio.file.Path; + public class RemoteStorePeerRecoverySourceHandlerTests extends OpenSearchIndexLevelReplicationTestCase { private static final Settings settings = Settings.builder() @@ -26,23 +31,36 @@ public class RemoteStorePeerRecoverySourceHandlerTests extends OpenSearchIndexLe .put(IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING.getKey(), "100ms") .build(); + @Before + public void setup() { + // Todo: Remove feature flag once remote store integration with segrep goes GA + FeatureFlags.initializeFeatureFlags( + Settings.builder().put(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL_SETTING.getKey(), "true").build() + ); + } + public 
void testReplicaShardRecoveryUptoLastFlushedCommit() throws Exception { - try (ReplicationGroup shards = createGroup(0, settings, new NRTReplicationEngineFactory())) { + final Path remoteDir = createTempDir(); + final String indexMapping = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": {} }"; + try (ReplicationGroup shards = createGroup(0, settings, indexMapping, new NRTReplicationEngineFactory(), remoteDir)) { // Step1 - Start primary, index docs and flush shards.startPrimary(); final IndexShard primary = shards.getPrimary(); - int numDocs = shards.indexDocs(randomIntBetween(10, 100)); + int numDocs = shards.indexDocs(randomIntBetween(10, 20)); + logger.info("--> Index numDocs {} and flush", numDocs); shards.flush(); // Step 2 - Start replica for recovery to happen, check both has same number of docs - final IndexShard replica1 = shards.addReplica(); + final IndexShard replica1 = shards.addReplica(remoteDir); + logger.info("--> Added and started replica {}", replica1.routingEntry()); shards.startAll(); assertEquals(getDocIdAndSeqNos(primary), getDocIdAndSeqNos(replica1)); // Step 3 - Index more docs, run segment replication, check both have same number of docs - int moreDocs = shards.indexDocs(randomIntBetween(10, 100)); + int moreDocs = shards.indexDocs(randomIntBetween(10, 20)); primary.refresh("test"); + logger.info("--> Index more docs {} and replicate segments", moreDocs); replicateSegments(primary, shards.getReplicas()); assertEquals(getDocIdAndSeqNos(primary), getDocIdAndSeqNos(replica1)); @@ -55,7 +73,8 @@ public void testReplicaShardRecoveryUptoLastFlushedCommit() throws Exception { assertFalse(primary.getRetentionLeases().contains(ReplicationTracker.getPeerRecoveryRetentionLeaseId(replica1.routingEntry()))); // Step 6 - Start new replica, recovery happens, and check that new replica has all docs - final IndexShard replica2 = shards.addReplica(); + final IndexShard replica2 = shards.addReplica(remoteDir); + logger.info("--> Added and started replica {}", replica2.routingEntry()); shards.startAll(); shards.assertAllEqual(numDocs + moreDocs); diff --git a/server/src/test/java/org/opensearch/indices/replication/RemoteStoreReplicationSourceTests.java b/server/src/test/java/org/opensearch/indices/replication/RemoteStoreReplicationSourceTests.java index 04b5aa58ea485..9204f48ba5bdd 100644 --- a/server/src/test/java/org/opensearch/indices/replication/RemoteStoreReplicationSourceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/RemoteStoreReplicationSourceTests.java @@ -8,43 +8,38 @@ package org.opensearch.indices.replication; -import org.apache.lucene.codecs.Codec; import org.apache.lucene.store.FilterDirectory; -import org.mockito.Mockito; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.index.engine.InternalEngineFactory; +import org.opensearch.index.engine.NRTReplicationEngineFactory; import org.opensearch.index.replication.OpenSearchIndexLevelReplicationTestCase; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.IndexShardState; import org.opensearch.index.shard.RemoteStoreRefreshListenerTests; import org.opensearch.index.store.RemoteSegmentStoreDirectory; import org.opensearch.index.store.Store; +import org.opensearch.index.store.StoreFileMetadata; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; import org.opensearch.indices.replication.common.ReplicationType; import 
java.io.IOException; import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class RemoteStoreReplicationSourceTests extends OpenSearchIndexLevelReplicationTestCase { - - private static final long PRIMARY_TERM = 1L; - private static final long SEGMENTS_GEN = 2L; - private static final long VERSION = 4L; private static final long REPLICATION_ID = 123L; private RemoteStoreReplicationSource replicationSource; - private IndexShard indexShard; - - private IndexShard mockShard; - - private Store remoteStore; + private IndexShard primaryShard; + private IndexShard replicaShard; private final Settings settings = Settings.builder() .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "my-repo") @@ -55,146 +50,110 @@ public class RemoteStoreReplicationSourceTests extends OpenSearchIndexLevelRepli @Override public void setUp() throws Exception { super.setUp(); - - indexShard = newStartedShard(true, settings, new InternalEngineFactory()); - - indexDoc(indexShard, "_doc", "1"); - indexDoc(indexShard, "_doc", "2"); - indexShard.refresh("test"); - - // mock shard - mockShard = mock(IndexShard.class); - Store store = mock(Store.class); - when(mockShard.store()).thenReturn(store); - when(store.directory()).thenReturn(indexShard.store().directory()); - remoteStore = mock(Store.class); - when(mockShard.remoteStore()).thenReturn(remoteStore); - RemoteSegmentStoreDirectory remoteSegmentStoreDirectory = - (RemoteSegmentStoreDirectory) ((FilterDirectory) ((FilterDirectory) indexShard.remoteStore().directory()).getDelegate()) - .getDelegate(); - FilterDirectory remoteStoreFilterDirectory = new RemoteStoreRefreshListenerTests.TestFilterDirectory( - new RemoteStoreRefreshListenerTests.TestFilterDirectory(remoteSegmentStoreDirectory) - ); - when(remoteStore.directory()).thenReturn(remoteStoreFilterDirectory); - replicationSource = new RemoteStoreReplicationSource(mockShard); + primaryShard = newStartedShard(true, settings, new InternalEngineFactory()); + indexDoc(primaryShard, "_doc", "1"); + indexDoc(primaryShard, "_doc", "2"); + primaryShard.refresh("test"); + replicaShard = newStartedShard(false, settings, new NRTReplicationEngineFactory()); } @Override public void tearDown() throws Exception { - closeShards(indexShard); + closeShards(primaryShard, replicaShard); super.tearDown(); } public void testGetCheckpointMetadata() throws ExecutionException, InterruptedException { - when(mockShard.getSegmentInfosSnapshot()).thenReturn(indexShard.getSegmentInfosSnapshot()); - final ReplicationCheckpoint checkpoint = new ReplicationCheckpoint( - indexShard.shardId(), - PRIMARY_TERM, - SEGMENTS_GEN, - VERSION, - Codec.getDefault().getName() - ); - + final ReplicationCheckpoint checkpoint = primaryShard.getLatestReplicationCheckpoint(); final PlainActionFuture res = PlainActionFuture.newFuture(); + replicationSource = new RemoteStoreReplicationSource(primaryShard); replicationSource.getCheckpointMetadata(REPLICATION_ID, checkpoint, res); CheckpointInfoResponse response = res.get(); assert (response.getCheckpoint().equals(checkpoint)); - assert (!response.getMetadataMap().isEmpty()); + assert (response.getMetadataMap().isEmpty() == false); } public void testGetCheckpointMetadataFailure() { - final 
ReplicationCheckpoint checkpoint = new ReplicationCheckpoint( - indexShard.shardId(), - PRIMARY_TERM, - SEGMENTS_GEN, - VERSION, - Codec.getDefault().getName() - ); - + IndexShard mockShard = mock(IndexShard.class); + final ReplicationCheckpoint checkpoint = primaryShard.getLatestReplicationCheckpoint(); when(mockShard.getSegmentInfosSnapshot()).thenThrow(new RuntimeException("test")); - assertThrows(RuntimeException.class, () -> { + replicationSource = new RemoteStoreReplicationSource(mockShard); final PlainActionFuture res = PlainActionFuture.newFuture(); replicationSource.getCheckpointMetadata(REPLICATION_ID, checkpoint, res); res.get(); }); } - public void testGetCheckpointMetadataEmpty() throws ExecutionException, InterruptedException, IOException { - when(mockShard.getSegmentInfosSnapshot()).thenReturn(indexShard.getSegmentInfosSnapshot()); - final ReplicationCheckpoint checkpoint = new ReplicationCheckpoint( - indexShard.shardId(), - PRIMARY_TERM, - SEGMENTS_GEN, - VERSION, - Codec.getDefault().getName() - ); - IndexShard emptyIndexShard = null; - try { - emptyIndexShard = newStartedShard( - true, - settings, - new InternalEngineFactory() - ); - RemoteSegmentStoreDirectory remoteSegmentStoreDirectory = - (RemoteSegmentStoreDirectory) ((FilterDirectory) ((FilterDirectory) emptyIndexShard.remoteStore().directory()).getDelegate()) - .getDelegate(); - FilterDirectory remoteStoreFilterDirectory = new RemoteStoreRefreshListenerTests.TestFilterDirectory( - new RemoteStoreRefreshListenerTests.TestFilterDirectory(remoteSegmentStoreDirectory) - ); - when(remoteStore.directory()).thenReturn(remoteStoreFilterDirectory); + public void testGetSegmentFiles() throws ExecutionException, InterruptedException, IOException { + final ReplicationCheckpoint checkpoint = primaryShard.getLatestReplicationCheckpoint(); + List filesToFetch = primaryShard.getSegmentMetadataMap().values().stream().collect(Collectors.toList()); + final PlainActionFuture res = PlainActionFuture.newFuture(); + replicationSource = new RemoteStoreReplicationSource(primaryShard); + replicationSource.getSegmentFiles(REPLICATION_ID, checkpoint, filesToFetch, replicaShard, res); + GetSegmentFilesResponse response = res.get(); + assertEquals(response.files.size(), filesToFetch.size()); + assertTrue(response.files.containsAll(filesToFetch)); + closeShards(replicaShard); + } - final PlainActionFuture res = PlainActionFuture.newFuture(); - when(mockShard.state()).thenReturn(IndexShardState.RECOVERING); - // Recovering shard should just do a noop and return empty metadata map. - replicationSource.getCheckpointMetadata(REPLICATION_ID, checkpoint, res); - CheckpointInfoResponse response = res.get(); - assert (response.getCheckpoint().equals(checkpoint)); - assert (response.getMetadataMap().isEmpty()); - - when(mockShard.state()).thenReturn(IndexShardState.STARTED); - // Started shard should fail with assertion error. 
- expectThrows(AssertionError.class, () -> { - final PlainActionFuture res2 = PlainActionFuture.newFuture(); - replicationSource.getCheckpointMetadata(REPLICATION_ID, checkpoint, res2); - }); - } finally { - closeShards(emptyIndexShard); + public void testGetSegmentFilesAlreadyExists() throws IOException, InterruptedException { + final ReplicationCheckpoint checkpoint = primaryShard.getLatestReplicationCheckpoint(); + List filesToFetch = primaryShard.getSegmentMetadataMap().values().stream().collect(Collectors.toList()); + CountDownLatch latch = new CountDownLatch(1); + try { + final PlainActionFuture res = PlainActionFuture.newFuture(); + replicationSource = new RemoteStoreReplicationSource(primaryShard); + replicationSource.getSegmentFiles(REPLICATION_ID, checkpoint, filesToFetch, primaryShard, res); + res.get(); + } catch (AssertionError | ExecutionException ex) { + latch.countDown(); + assertTrue(ex instanceof AssertionError); + assertTrue(ex.getMessage().startsWith("Local store already contains the file")); } + latch.await(); } - public void testGetSegmentFiles() throws ExecutionException, InterruptedException { - final ReplicationCheckpoint checkpoint = new ReplicationCheckpoint( - indexShard.shardId(), - PRIMARY_TERM, - SEGMENTS_GEN, - VERSION, - Codec.getDefault().getName() - ); - + public void testGetSegmentFilesReturnEmptyResponse() throws ExecutionException, InterruptedException { + final ReplicationCheckpoint checkpoint = primaryShard.getLatestReplicationCheckpoint(); final PlainActionFuture res = PlainActionFuture.newFuture(); - replicationSource.getSegmentFiles(REPLICATION_ID, checkpoint, Collections.emptyList(), indexShard, res); + replicationSource = new RemoteStoreReplicationSource(primaryShard); + replicationSource.getSegmentFiles(REPLICATION_ID, checkpoint, Collections.emptyList(), primaryShard, res); GetSegmentFilesResponse response = res.get(); assert (response.files.isEmpty()); - assertEquals("remote store", replicationSource.getDescription()); - } - public void testGetSegmentFilesFailure() throws IOException { - final ReplicationCheckpoint checkpoint = new ReplicationCheckpoint( - indexShard.shardId(), - PRIMARY_TERM, - SEGMENTS_GEN, - VERSION, - Codec.getDefault().getName() - ); - Mockito.doThrow(new RuntimeException("testing")) - .when(mockShard) - .syncSegmentsFromRemoteSegmentStore(Mockito.anyBoolean(), Mockito.anyBoolean(), Mockito.anyBoolean()); - assertThrows(ExecutionException.class, () -> { - final PlainActionFuture res = PlainActionFuture.newFuture(); - replicationSource.getSegmentFiles(REPLICATION_ID, checkpoint, Collections.emptyList(), mockShard, res); - res.get(10, TimeUnit.SECONDS); + public void testGetCheckpointMetadataEmpty() throws ExecutionException, InterruptedException, IOException { + IndexShard mockShard = mock(IndexShard.class); + // Build mockShard to return replicaShard directory so that empty metadata file is returned. + buildIndexShardBehavior(mockShard, replicaShard); + replicationSource = new RemoteStoreReplicationSource(mockShard); + + // Mock replica shard state to RECOVERING so that getCheckpointInfo return empty map + final ReplicationCheckpoint checkpoint = replicaShard.getLatestReplicationCheckpoint(); + final PlainActionFuture res = PlainActionFuture.newFuture(); + when(mockShard.state()).thenReturn(IndexShardState.RECOVERING); + replicationSource = new RemoteStoreReplicationSource(mockShard); + // Recovering shard should just do a noop and return empty metadata map. 
+ replicationSource.getCheckpointMetadata(REPLICATION_ID, checkpoint, res); + CheckpointInfoResponse response = res.get(); + assert (response.getCheckpoint().equals(checkpoint)); + assert (response.getMetadataMap().isEmpty()); + + // Started shard should fail with assertion error. + when(mockShard.state()).thenReturn(IndexShardState.STARTED); + expectThrows(AssertionError.class, () -> { + final PlainActionFuture res2 = PlainActionFuture.newFuture(); + replicationSource.getCheckpointMetadata(REPLICATION_ID, checkpoint, res2); }); } + + private void buildIndexShardBehavior(IndexShard mockShard, IndexShard indexShard) { + when(mockShard.getSegmentInfosSnapshot()).thenReturn(indexShard.getSegmentInfosSnapshot()); + Store remoteStore = mock(Store.class); + when(mockShard.remoteStore()).thenReturn(remoteStore); + RemoteSegmentStoreDirectory remoteSegmentStoreDirectory = (RemoteSegmentStoreDirectory) ((FilterDirectory) ((FilterDirectory) indexShard.remoteStore().directory()).getDelegate()).getDelegate(); + FilterDirectory remoteStoreFilterDirectory = new RemoteStoreRefreshListenerTests.TestFilterDirectory(new RemoteStoreRefreshListenerTests.TestFilterDirectory(remoteSegmentStoreDirectory)); + when(remoteStore.directory()).thenReturn(remoteStoreFilterDirectory); + } } diff --git a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java index f3c98ce4f9f03..278847e56e65f 100644 --- a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java @@ -149,7 +149,11 @@ protected ReplicationGroup createGroup(int replicas, Settings settings, EngineFa protected ReplicationGroup createGroup(int replicas, Settings settings, String mappings, EngineFactory engineFactory) throws IOException { - return createGroup(replicas, settings, mappings, engineFactory, null); + Path remotePath = null; + if ("true".equals(settings.get(IndexMetadata.SETTING_REMOTE_STORE_ENABLED))) { + remotePath = createTempDir(); + } + return createGroup(replicas, settings, mappings, engineFactory, remotePath); } protected ReplicationGroup createGroup(int replicas, Settings settings, String mappings, EngineFactory engineFactory, Path remotePath) diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 7a492dbebd836..66e5459cfea3b 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -1326,9 +1326,27 @@ public static Engine.Warmer createTestWarmer(IndexSettings indexSettings) { }; } + private SegmentReplicationTargetService getSegmentReplicationTargetService( + TransportService transportService, + IndicesService indicesService, + ClusterService clusterService, + SegmentReplicationSourceFactory sourceFactory + ) { + return new SegmentReplicationTargetService( + threadPool, + new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), + transportService, + sourceFactory, + indicesService, + clusterService + ); + } + /** * Segment Replication specific test method - Creates a {@link SegmentReplicationTargetService} to perform 
replications that has - * been configured to return the given primaryShard's current segments. + * been configured to return the given primaryShard's current segments. In order to do so, it mimics the replication + * source (to avoid transport calls) and simply copies over the segment files from primary store to replica's as part of + * get_files calls. * * @param primaryShard {@link IndexShard} - The target replica shard in segment replication. * @param target {@link IndexShard} - The source primary shard in segment replication. @@ -1339,7 +1357,7 @@ public static Engine.Warmer createTestWarmer(IndexSettings indexSettings) { * which are desired right after files are copied. e.g. To work with temp files * @return Returns SegmentReplicationTargetService */ - public final SegmentReplicationTargetService prepareForReplication( + private SegmentReplicationTargetService prepareForReplication( IndexShard primaryShard, IndexShard target, TransportService transportService, @@ -1347,22 +1365,28 @@ public final SegmentReplicationTargetService prepareForReplication( ClusterService clusterService, Consumer postGetFilesRunnable ) { - final SegmentReplicationSourceFactory sourceFactory = mock(SegmentReplicationSourceFactory.class); - final SegmentReplicationTargetService targetService = new SegmentReplicationTargetService( - threadPool, - new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), - transportService, - sourceFactory, - indicesService, - clusterService - ); - final SegmentReplicationSource replicationSource = getSegmentReplicationSource( - primaryShard, - (repId) -> targetService.get(repId), - postGetFilesRunnable - ); - when(sourceFactory.get(any())).thenReturn(replicationSource); - when(indicesService.getShardOrNull(any())).thenReturn(target); + + SegmentReplicationSourceFactory sourceFactory = null; + SegmentReplicationTargetService targetService; + if (primaryShard.indexSettings.isRemoteStoreEnabled()) { + RecoverySettings recoverySettings = new RecoverySettings( + Settings.EMPTY, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) + ); + sourceFactory = new SegmentReplicationSourceFactory(transportService, recoverySettings, clusterService); + targetService = getSegmentReplicationTargetService(transportService, indicesService, clusterService, sourceFactory); + } else { + sourceFactory = mock(SegmentReplicationSourceFactory.class); + targetService = getSegmentReplicationTargetService(transportService, indicesService, clusterService, sourceFactory); + final SegmentReplicationSource replicationSource = getSegmentReplicationSource( + primaryShard, + (repId) -> targetService.get(repId), + postGetFilesRunnable + ); + when(sourceFactory.get(any())).thenReturn(replicationSource); + // This is needed for force segment sync call. Remote store uses a different recovery mechanism + when(indicesService.getShardOrNull(any())).thenReturn(target); + } return targetService; } @@ -1502,9 +1526,11 @@ public void getSegmentFiles( * @param replicaShards - Replicas that will be updated. * @return {@link List} List of target components orchestrating replication. 
*/ - public final List replicateSegments(IndexShard primaryShard, List replicaShards) + protected final List replicateSegments(IndexShard primaryShard, List replicaShards) throws IOException, InterruptedException { + // Latch to block test execution until replica catches up final CountDownLatch countDownLatch = new CountDownLatch(replicaShards.size()); + // Get primary metadata to verify with replica's, used to ensure replica catches up Map primaryMetadata; try (final GatedCloseable segmentInfosSnapshot = primaryShard.getSegmentInfosSnapshot()) { final SegmentInfos primarySegmentInfos = segmentInfosSnapshot.get(); From 82b5f3c7f95db84d4c71ed9aa9c968334da7a926 Mon Sep 17 00:00:00 2001 From: Sarthak Aggarwal Date: Wed, 2 Aug 2023 13:54:07 +0530 Subject: [PATCH 46/75] Disallowing compression level for lz4 and best_compression codec (#8737) * Disallowing compression level for lz4 and best_compression codec * setting up codec settings interface for validation Signed-off-by: Sarthak Aggarwal --- CHANGELOG.md | 1 + .../index/codec/CodecCompressionLevelIT.java | 178 ++++++++++++++++++ .../opensearch/index/codec/CodecSettings.java | 21 +++ .../index/codec/customcodecs/ZstdCodec.java | 10 +- .../codec/customcodecs/ZstdNoDictCodec.java | 10 +- .../opensearch/index/engine/EngineConfig.java | 51 ++++- .../opensearch/index/codec/CodecTests.java | 71 ++++++- 7 files changed, 332 insertions(+), 10 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/opensearch/index/codec/CodecCompressionLevelIT.java create mode 100644 server/src/main/java/org/opensearch/index/codec/CodecSettings.java diff --git a/CHANGELOG.md b/CHANGELOG.md index a37976462b38e..e29bbd2da4db5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -81,6 +81,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Start replication checkpointTimers on primary before segments upload to remote store. ([#8221]()https://github.com/opensearch-project/OpenSearch/pull/8221) - [distribution/archives] [Linux] [x64] Provide the variant of the distributions bundled with JRE ([#8195]()https://github.com/opensearch-project/OpenSearch/pull/8195) - Add configuration for file cache size to max remote data ratio to prevent oversubscription of file cache ([#8606](https://github.com/opensearch-project/OpenSearch/pull/8606)) +- Disallow compression level to be set for default and best_compression index codecs ([#8737]()https://github.com/opensearch-project/OpenSearch/pull/8737) ### Dependencies - Bump `org.apache.logging.log4j:log4j-core` from 2.17.1 to 2.20.0 ([#8307](https://github.com/opensearch-project/OpenSearch/pull/8307)) diff --git a/server/src/internalClusterTest/java/org/opensearch/index/codec/CodecCompressionLevelIT.java b/server/src/internalClusterTest/java/org/opensearch/index/codec/CodecCompressionLevelIT.java new file mode 100644 index 0000000000000..5f3e53f1454fc --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/index/codec/CodecCompressionLevelIT.java @@ -0,0 +1,178 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec; + +import org.apache.logging.log4j.core.util.Throwables; +import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.util.concurrent.ExecutionException; + +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) +public class CodecCompressionLevelIT extends OpenSearchIntegTestCase { + + public void testLuceneCodecsCreateIndexWithCompressionLevel() { + + internalCluster().ensureAtLeastNumDataNodes(1); + final String index = "test-index"; + + // creating index + assertThrows( + IllegalArgumentException.class, + () -> createIndex( + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)) + .put("index.codec.compression_level", randomIntBetween(1, 6)) + .build() + ) + ); + + createIndex( + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)) + .build() + ); + ensureGreen(index); + } + + public void testZStandardCodecsCreateIndexWithCompressionLevel() { + + internalCluster().ensureAtLeastNumDataNodes(1); + final String index = "test-index"; + + // creating index + createIndex( + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.codec", randomFrom(CodecService.ZSTD_CODEC, CodecService.ZSTD_NO_DICT_CODEC)) + .put("index.codec.compression_level", randomIntBetween(1, 6)) + .build() + ); + + ensureGreen(index); + } + + public void testZStandardToLuceneCodecsWithCompressionLevel() throws ExecutionException, InterruptedException { + + internalCluster().ensureAtLeastNumDataNodes(1); + final String index = "test-index"; + + // creating index + createIndex( + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.codec", randomFrom(CodecService.ZSTD_CODEC, CodecService.ZSTD_NO_DICT_CODEC)) + .put("index.codec.compression_level", randomIntBetween(1, 6)) + .build() + ); + ensureGreen(index); + + assertAcked(client().admin().indices().prepareClose(index)); + + Throwable executionException = expectThrows( + ExecutionException.class, + () -> client().admin() + .indices() + .updateSettings( + new UpdateSettingsRequest(index).settings( + Settings.builder().put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)) + ) + ) + .get() + ); + + Throwable rootCause = Throwables.getRootCause(executionException); + assertEquals(IllegalArgumentException.class, rootCause.getClass()); + assertTrue(rootCause.getMessage().startsWith("Compression level cannot be set")); + + assertAcked( + client().admin() + .indices() + .updateSettings( + new UpdateSettingsRequest(index).settings( + Settings.builder() + .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)) + .put("index.codec.compression_level", (String) null) + ) + ) + .get() + ); + + 
assertAcked(client().admin().indices().prepareOpen(index)); + ensureGreen(index); + } + + public void testLuceneToZStandardCodecsWithCompressionLevel() throws ExecutionException, InterruptedException { + + internalCluster().ensureAtLeastNumDataNodes(1); + final String index = "test-index"; + + // creating index + createIndex( + index, + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)) + .build() + ); + ensureGreen(index); + + assertAcked(client().admin().indices().prepareClose(index)); + + Throwable executionException = expectThrows( + ExecutionException.class, + () -> client().admin() + .indices() + .updateSettings( + new UpdateSettingsRequest(index).settings( + Settings.builder() + .put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)) + .put("index.codec.compression_level", randomIntBetween(1, 6)) + ) + ) + .get() + ); + + Throwable rootCause = Throwables.getRootCause(executionException); + assertEquals(IllegalArgumentException.class, rootCause.getClass()); + assertTrue(rootCause.getMessage().startsWith("Compression level cannot be set")); + + assertAcked( + client().admin() + .indices() + .updateSettings( + new UpdateSettingsRequest(index).settings( + Settings.builder() + .put("index.codec", randomFrom(CodecService.ZSTD_CODEC, CodecService.ZSTD_NO_DICT_CODEC)) + .put("index.codec.compression_level", randomIntBetween(1, 6)) + ) + ) + .get() + ); + + assertAcked(client().admin().indices().prepareOpen(index)); + ensureGreen(index); + } + +} diff --git a/server/src/main/java/org/opensearch/index/codec/CodecSettings.java b/server/src/main/java/org/opensearch/index/codec/CodecSettings.java new file mode 100644 index 0000000000000..2d371dfc190db --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/CodecSettings.java @@ -0,0 +1,21 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec; + +import org.apache.lucene.codecs.Codec; +import org.opensearch.common.settings.Setting; + +/** + * This {@link CodecSettings} allows us to manage the settings with {@link Codec}. + * + * @opensearch.internal + */ +public interface CodecSettings { + boolean supports(Setting setting); +} diff --git a/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdCodec.java b/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdCodec.java index 04c110fceacdf..042f7eaa29e53 100644 --- a/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdCodec.java @@ -9,12 +9,15 @@ package org.opensearch.index.codec.customcodecs; import org.apache.logging.log4j.Logger; +import org.opensearch.common.settings.Setting; +import org.opensearch.index.codec.CodecSettings; +import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.mapper.MapperService; /** * ZstdCodec provides ZSTD compressor using the zstd-jni library. */ -public class ZstdCodec extends Lucene95CustomCodec { +public class ZstdCodec extends Lucene95CustomCodec implements CodecSettings { /** * Creates a new ZstdCodec instance with the default compression level. 
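Taken together with the EngineConfig validation further below, the practical effect is that only the zstd and zstd_no_dict codecs accept index.codec.compression_level (1 to 6), while default, best_compression and lucene_default reject it. A minimal sketch of the two cases, using the same Settings builder calls as the integration test above (the concrete level value is arbitrary):

    // accepted: zstd and zstd_no_dict implement CodecSettings and support the level setting
    Settings accepted = Settings.builder()
        .put("index.codec", "zstd")
        .put("index.codec.compression_level", 3)
        .build();

    // rejected at index creation or settings-update time with
    // "Compression level cannot be set for the best_compression codec."
    Settings rejected = Settings.builder()
        .put("index.codec", "best_compression")
        .put("index.codec.compression_level", 3)
        .build();
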
@@ -41,4 +44,9 @@ public ZstdCodec(MapperService mapperService, Logger logger, int compressionLeve public String toString() { return getClass().getSimpleName(); } + + @Override + public boolean supports(Setting setting) { + return setting.equals(EngineConfig.INDEX_CODEC_COMPRESSION_LEVEL_SETTING); + } } diff --git a/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDictCodec.java b/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDictCodec.java index 134f9a14422ad..a7e8e0e42ee68 100644 --- a/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDictCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/customcodecs/ZstdNoDictCodec.java @@ -9,12 +9,15 @@ package org.opensearch.index.codec.customcodecs; import org.apache.logging.log4j.Logger; +import org.opensearch.common.settings.Setting; +import org.opensearch.index.codec.CodecSettings; +import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.mapper.MapperService; /** * ZstdNoDictCodec provides ZSTD compressor without a dictionary support. */ -public class ZstdNoDictCodec extends Lucene95CustomCodec { +public class ZstdNoDictCodec extends Lucene95CustomCodec implements CodecSettings { /** * Creates a new ZstdNoDictCodec instance with the default compression level. @@ -41,4 +44,9 @@ public ZstdNoDictCodec(MapperService mapperService, Logger logger, int compressi public String toString() { return getClass().getSimpleName(); } + + @Override + public boolean supports(Setting setting) { + return setting.equals(EngineConfig.INDEX_CODEC_COMPRESSION_LEVEL_SETTING); + } } diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java index 7900e63a95c39..03669eaac0070 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java @@ -48,6 +48,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecSettings; import org.opensearch.index.mapper.ParsedDocument; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.core.index.shard.ShardId; @@ -63,6 +64,7 @@ import java.util.Comparator; import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.function.BooleanSupplier; import java.util.function.LongSupplier; import java.util.function.Supplier; @@ -148,13 +150,52 @@ public Supplier retentionLeasesSupplier() { * Compression Level gives a trade-off between compression ratio and speed. The higher compression level results in higher compression ratio but slower compression and decompression speeds. * This setting is not realtime updateable. 
*/ - public static final Setting INDEX_CODEC_COMPRESSION_LEVEL_SETTING = Setting.intSetting( + + public static final Setting INDEX_CODEC_COMPRESSION_LEVEL_SETTING = new Setting<>( "index.codec.compression_level", - 3, - 1, - 6, + Integer.toString(3), + new Setting.IntegerParser(1, 6, "index.codec.compression_level", false), Property.IndexScope - ); + ) { + @Override + public Set getSettingsDependencies(String key) { + return Set.of(new SettingDependency() { + @Override + public Setting getSetting() { + return INDEX_CODEC_SETTING; + } + + @Override + public void validate(String key, Object value, Object dependency) { + if (!(dependency instanceof String)) { + throw new IllegalArgumentException("Codec should be of string type."); + } + doValidateCodecSettings((String) dependency); + } + }); + } + }; + + private static void doValidateCodecSettings(final String codec) { + switch (codec) { + case "zstd": + case "zstd_no_dict": + return; + case "best_compression": + case "lucene_default": + case "default": + break; + default: + if (Codec.availableCodecs().contains(codec)) { + Codec luceneCodec = Codec.forName(codec); + if (luceneCodec instanceof CodecSettings + && ((CodecSettings) luceneCodec).supports(INDEX_CODEC_COMPRESSION_LEVEL_SETTING)) { + return; + } + } + } + throw new IllegalArgumentException("Compression level cannot be set for the " + codec + " codec."); + } /** * Configures an index to optimize documents with auto generated ids for append only. If this setting is updated from false diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index b0d904392407c..0eeeae9e8e59e 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -43,12 +43,14 @@ import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; +import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; import org.opensearch.index.analysis.IndexAnalyzers; import org.opensearch.index.codec.customcodecs.Lucene95CustomCodec; import org.opensearch.index.codec.customcodecs.Lucene95CustomStoredFieldsFormat; +import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.similarity.SimilarityService; import org.opensearch.indices.mapper.MapperRegistry; @@ -60,6 +62,7 @@ import java.util.Collections; import static org.hamcrest.Matchers.instanceOf; +import static org.opensearch.index.engine.EngineConfig.INDEX_CODEC_COMPRESSION_LEVEL_SETTING; @SuppressCodecs("*") // we test against default codec so never get a random one here! 
public class CodecTests extends OpenSearchTestCase { @@ -96,7 +99,7 @@ public void testZstdNoDict() throws Exception { public void testZstdWithCompressionLevel() throws Exception { int randomCompressionLevel = randomIntBetween(1, 6); - Codec codec = createCodecService(randomCompressionLevel).codec("zstd"); + Codec codec = createCodecService(randomCompressionLevel, "zstd").codec("zstd"); assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD, codec); Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); assertEquals(randomCompressionLevel, storedFieldsFormat.getCompressionLevel()); @@ -104,12 +107,73 @@ public void testZstdWithCompressionLevel() throws Exception { public void testZstdNoDictWithCompressionLevel() throws Exception { int randomCompressionLevel = randomIntBetween(1, 6); - Codec codec = createCodecService(randomCompressionLevel).codec("zstd_no_dict"); + Codec codec = createCodecService(randomCompressionLevel, "zstd_no_dict").codec("zstd_no_dict"); assertStoredFieldsCompressionEquals(Lucene95CustomCodec.Mode.ZSTD_NO_DICT, codec); Lucene95CustomStoredFieldsFormat storedFieldsFormat = (Lucene95CustomStoredFieldsFormat) codec.storedFieldsFormat(); assertEquals(randomCompressionLevel, storedFieldsFormat.getCompressionLevel()); } + public void testBestCompressionWithCompressionLevel() { + final Settings zstdSettings = Settings.builder() + .put(INDEX_CODEC_COMPRESSION_LEVEL_SETTING.getKey(), randomIntBetween(1, 6)) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), randomFrom(CodecService.ZSTD_CODEC, CodecService.ZSTD_NO_DICT_CODEC)) + .build(); + + // able to validate zstd + final IndexScopedSettings zstdIndexScopedSettings = new IndexScopedSettings( + zstdSettings, + IndexScopedSettings.BUILT_IN_INDEX_SETTINGS + ); + zstdIndexScopedSettings.validate(zstdSettings, true); + + final Settings settings = Settings.builder() + .put(INDEX_CODEC_COMPRESSION_LEVEL_SETTING.getKey(), randomIntBetween(1, 6)) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)) + .build(); + final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexScopedSettings.validate(settings, true)); + assertTrue(e.getMessage().startsWith("Compression level cannot be set")); + } + + public void testLuceneCodecsWithCompressionLevel() { + String codecName = randomFrom(Codec.availableCodecs()); + Codec codec = Codec.forName(codecName); + + final Settings customCodecSettings = Settings.builder() + .put(INDEX_CODEC_COMPRESSION_LEVEL_SETTING.getKey(), randomIntBetween(1, 6)) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), "Lucene95CustomCodec") + .build(); + + final IndexScopedSettings customCodecIndexScopedSettings = new IndexScopedSettings( + customCodecSettings, + IndexScopedSettings.BUILT_IN_INDEX_SETTINGS + ); + customCodecIndexScopedSettings.validate(customCodecSettings, true); + + final Settings settings = Settings.builder() + .put(INDEX_CODEC_COMPRESSION_LEVEL_SETTING.getKey(), randomIntBetween(1, 6)) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName) + .build(); + final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + + if (!(codec instanceof CodecSettings && ((CodecSettings) 
codec).supports(INDEX_CODEC_COMPRESSION_LEVEL_SETTING))) { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> indexScopedSettings.validate(settings, true) + ); + assertTrue(e.getMessage().startsWith("Compression level cannot be set")); + } + } + + public void testZstandardCompressionLevelSupport() throws Exception { + CodecService codecService = createCodecService(false); + CodecSettings zstdCodec = (CodecSettings) codecService.codec("zstd"); + CodecSettings zstdNoDictCodec = (CodecSettings) codecService.codec("zstd_no_dict"); + assertTrue(zstdCodec.supports(INDEX_CODEC_COMPRESSION_LEVEL_SETTING)); + assertTrue(zstdNoDictCodec.supports(INDEX_CODEC_COMPRESSION_LEVEL_SETTING)); + } + public void testDefaultMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("default"); assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); @@ -165,9 +229,10 @@ private CodecService createCodecService(boolean isMapperServiceNull) throws IOEx return buildCodecService(nodeSettings); } - private CodecService createCodecService(int randomCompressionLevel) throws IOException { + private CodecService createCodecService(int randomCompressionLevel, String codec) throws IOException { Settings nodeSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put("index.codec", codec) .put("index.codec.compression_level", randomCompressionLevel) .build(); return buildCodecService(nodeSettings); From 97205280d198c115ab33d6cbd4524b56269a0c2f Mon Sep 17 00:00:00 2001 From: Sarthak Aggarwal Date: Wed, 2 Aug 2023 17:40:27 +0530 Subject: [PATCH 47/75] readding codecs in the tests (#8987) Signed-off-by: Sarthak Aggarwal --- .../java/org/opensearch/upgrades/IndexingIT.java | 9 ++++++++- .../indices/replication/SegmentReplicationIT.java | 9 +++++---- .../opensearch/test/OpenSearchIntegTestCase.java | 15 ++++++++++++++- 3 files changed, 27 insertions(+), 6 deletions(-) diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index b60ee09d39048..a03d299b32274 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -43,17 +43,20 @@ import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.EngineConfig; import org.opensearch.indices.replication.common.ReplicationType; +import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.rest.yaml.ObjectPath; import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import static org.opensearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; import static org.opensearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; +import static org.opensearch.test.OpenSearchIntegTestCase.CODECS; /** * Basic test that indexed documents survive the rolling restart. 
See @@ -267,7 +270,11 @@ public void testIndexingWithSegRep() throws Exception { .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put( EngineConfig.INDEX_CODEC_SETTING.getKey(), - randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC) + randomFrom(new ArrayList<>(CODECS) { + { + add(CodecService.LUCENE_DEFAULT_CODEC); + } + }) ) .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"); createIndex(indexName, settings.build()); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java index 3ab1a2a8564c5..08186bf3f9362 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java @@ -201,10 +201,11 @@ public void testReplicationAfterPrimaryRefreshAndFlush() throws Exception { final String nodeB = internalCluster().startDataOnlyNode(); final Settings settings = Settings.builder() .put(indexSettings()) - .put( - EngineConfig.INDEX_CODEC_SETTING.getKey(), - randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC, CodecService.LUCENE_DEFAULT_CODEC) - ) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), randomFrom(new ArrayList<>(CODECS) { + { + add(CodecService.LUCENE_DEFAULT_CODEC); + } + })) .build(); createIndex(INDEX_NAME, settings); ensureGreen(INDEX_NAME); diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 5bba700f53dc4..5e79bec91bd90 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -38,6 +38,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.hc.core5.http.HttpHost; +import org.apache.lucene.codecs.Codec; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TotalHits; import org.apache.lucene.tests.util.LuceneTestCase; @@ -132,6 +133,7 @@ import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.Segment; +import org.opensearch.index.mapper.CompletionFieldMapper; import org.opensearch.index.mapper.MockFieldFilterPlugin; import org.opensearch.index.store.Store; import org.opensearch.index.translog.Translog; @@ -271,6 +273,17 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase { */ public static final String SYSPROP_THIRDPARTY = "tests.thirdparty"; + /** + * The lucene_default {@link Codec} is not added to the list as it internally maps to Asserting {@link Codec}. + * The override to fetch the {@link CompletionFieldMapper.CompletionFieldType} postings format is not available for this codec. + */ + public static final List CODECS = List.of( + CodecService.DEFAULT_CODEC, + CodecService.BEST_COMPRESSION_CODEC, + CodecService.ZSTD_CODEC, + CodecService.ZSTD_NO_DICT_CODEC + ); + /** * Annotation for third-party integration tests. *

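Because lucene_default is intentionally excluded from CODECS, tests that still want to exercise it append it locally before choosing at random; a small sketch of that pattern, equivalent to the anonymous-list form used in the IndexingIT and SegmentReplicationIT changes above:

    List<String> codecs = new ArrayList<>(CODECS);
    codecs.add(CodecService.LUCENE_DEFAULT_CODEC);
    String codec = randomFrom(codecs);
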
@@ -427,7 +440,7 @@ protected void randomIndexTemplate() { // otherwise, use it, it has assertions and so on that can find bugs. SuppressCodecs annotation = getClass().getAnnotation(SuppressCodecs.class); if (annotation != null && annotation.value().length == 1 && "*".equals(annotation.value()[0])) { - randomSettingsBuilder.put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)); + randomSettingsBuilder.put("index.codec", randomFrom(CODECS)); } else { randomSettingsBuilder.put("index.codec", CodecService.LUCENE_DEFAULT_CODEC); } From d916f9c1027f5b2ccff971a66921b7c26db1688f Mon Sep 17 00:00:00 2001 From: Stefano Maglione Date: Wed, 2 Aug 2023 18:11:09 +0200 Subject: [PATCH 48/75] Added featureflag (#8988) Added the featureFlagSettings method to OpenSearchSingleNodeTestCase Signed-off-by: Stefano Maglione --- .../BlobStoreRepositoryRemoteIndexTests.java | 5 ++++- .../blobstore/BlobStoreRepositoryTests.java | 4 ++-- .../opensearch/search/SearchServiceTests.java | 7 ++++++- .../test/OpenSearchSingleNodeTestCase.java | 18 ++++++++++++++++++ 4 files changed, 30 insertions(+), 4 deletions(-) diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java index f25498b8c8368..06fd2aaa22039 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRemoteIndexTests.java @@ -64,12 +64,15 @@ * Tests for the {@link BlobStoreRepository} and its subclasses. */ public class BlobStoreRepositoryRemoteIndexTests extends BlobStoreRepositoryHelperTests { + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.REMOTE_STORE, "true").build(); + } @Override protected Settings nodeSettings() { return Settings.builder() .put(super.nodeSettings()) - .put(FeatureFlags.REMOTE_STORE, "true") .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) .put(CLUSTER_REMOTE_STORE_ENABLED_SETTING.getKey(), true) .put(CLUSTER_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING.getKey(), "test-rs-repo") diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java index f1253e377c819..26082f2456867 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -106,8 +106,8 @@ protected void assertSnapshotOrGenericThread() { } @Override - protected Settings nodeSettings() { - return Settings.builder().put(super.nodeSettings()).put(FeatureFlags.REMOTE_STORE, "true").build(); + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.REMOTE_STORE, "true").build(); } public void testRetrieveSnapshots() throws Exception { diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 2371c5812814a..876a2d15cad7e 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -225,9 +225,14 @@ public void 
onQueryPhase(SearchContext context, long tookInNanos) { } } + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + @Override protected Settings nodeSettings() { - return Settings.builder().put("search.default_search_timeout", "5s").put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true).build(); + return Settings.builder().put("search.default_search_timeout", "5s").build(); } public void testClearOnClose() { diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index 1d7c04227b208..63b486e32ff5b 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -45,6 +45,8 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.routing.allocation.DiskThresholdSettings; import org.opensearch.common.Priority; +import org.opensearch.common.settings.FeatureFlagSettings; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; @@ -248,6 +250,7 @@ private Node newNode() { .put(FeatureFlags.TELEMETRY_SETTING.getKey(), true) .put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), true) .put(nodeSettings()) // allow test cases to provide their own settings or override these + .put(featureFlagSettings()) .build(); Collection> plugins = getPlugins(); @@ -414,4 +417,19 @@ protected boolean forbidPrivateIndexSettings() { return true; } + /** + * Setting all feature flag settings at base IT, which can be overridden later by individual + * IT classes. + * + * @return Feature flag settings. + */ + protected Settings featureFlagSettings() { + Settings.Builder featureSettings = Settings.builder(); + for (Setting builtInFlag : FeatureFlagSettings.BUILT_IN_FEATURE_FLAGS) { + featureSettings.put(builtInFlag.getKey(), builtInFlag.getDefaultRaw(Settings.EMPTY)); + } + featureSettings.put(FeatureFlags.TELEMETRY_SETTING.getKey(), true); + return featureSettings.build(); + } + } From 8afb22a525f4728e90826eabaa74109a9e745182 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 2 Aug 2023 15:01:24 -0500 Subject: [PATCH 49/75] [Refactor] Network and Transport common classes to Libraries (#9073) This commit refactors the following network and transport libraries to the opensearch common and core libraries respectively: * o.o.common.network.Cidrs -> :libs:opensearch-common * o.o.common.network.InetAddresses -> :libs:opensearch-common * o.o.common.network.NetworkAddress -> :libs:opensearch-common * o.o.common.transport.NetworkExceptionHelper -> :libs:opensearch-common * o.o.common.transport.PortsRange -> :libs:opensearch-common * o.o.common.transport.TransportAddress -> :libs:opensearch-core * o.o.common.transport.BoundTransportAddress -> :libs:opensearch-core * o.o.transport.TransportMessage -> :libs:opensearch-core * o.o.transport.TransportResponse -> :libs:opensearch-core The purpose is to reduce the change surface area of the core APIs to minimize impact to downstream consumers while moving toward establishing a formal API for cloud native or serverless implementations. 
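For downstream consumers the impact is largely limited to import paths; a minimal sketch of the updates a plugin would make, illustrative only and derived from the relocations listed above:

    // before this refactor
    import org.opensearch.common.transport.TransportAddress;
    import org.opensearch.common.transport.BoundTransportAddress;
    import org.opensearch.transport.TransportResponse;

    // after this refactor
    import org.opensearch.core.common.transport.TransportAddress;
    import org.opensearch.core.common.transport.BoundTransportAddress;
    import org.opensearch.core.transport.TransportResponse;

The classes whose packages are unchanged (Cidrs, InetAddresses, NetworkAddress, NetworkExceptionHelper, PortsRange) only move between modules, so they need no import changes.
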
Signed-off-by: Nicholas Walter Knize --- .../benchmark/routing/allocation/Allocators.java | 2 +- .../client/tasks/CancelTasksResponseTests.java | 2 +- .../main/java/org/opensearch/common/network/Cidrs.java | 0 .../org/opensearch/common/network/InetAddresses.java | 0 .../org/opensearch/common/network/NetworkAddress.java | 0 .../org/opensearch/common/network/package-info.java | 10 ++++++++++ .../common/transport/NetworkExceptionHelper.java | 0 .../org/opensearch/common/transport/PortsRange.java | 0 .../org/opensearch/common/transport/package-info.java | 2 +- .../java/org/opensearch/common/network/CidrsTests.java | 0 .../opensearch/common/network/InetAddressesTests.java | 0 .../opensearch/common/network/NetworkAddressTests.java | 0 .../core}/common/transport/BoundTransportAddress.java | 4 ++-- .../core}/common/transport/TransportAddress.java | 4 ++-- .../opensearch/core/common/transport/package-info.java | 10 ++++++++++ .../opensearch/core}/transport/TransportMessage.java | 4 ++-- .../opensearch/core}/transport/TransportResponse.java | 2 +- .../org/opensearch/core/transport/package-info.java | 10 ++++++++++ .../index/reindex/ReindexFromRemoteWithAuthTests.java | 2 +- .../java/org/opensearch/index/reindex/RetryTests.java | 2 +- .../java/org/opensearch/http/netty4/Netty4Http2IT.java | 2 +- .../http/netty4/Netty4HttpRequestSizeLimitIT.java | 2 +- .../org/opensearch/http/netty4/Netty4PipeliningIT.java | 2 +- .../netty4/Netty4TransportMultiPortIntegrationIT.java | 4 ++-- .../netty4/Netty4TransportPublishAddressIT.java | 4 ++-- .../opensearch/http/netty4/Netty4BadRequestTests.java | 2 +- .../http/netty4/Netty4HttpServerPipeliningTests.java | 2 +- .../http/netty4/Netty4HttpServerTransportTests.java | 2 +- .../netty4/Netty4SizeHeaderFrameDecoderTests.java | 2 +- .../transport/netty4/SimpleNetty4TransportTests.java | 2 +- .../classic/AbstractAzureComputeServiceTestCase.java | 2 +- .../azure/classic/AzureSeedHostsProvider.java | 2 +- .../discovery/ec2/AwsEc2SeedHostsProvider.java | 2 +- .../opensearch/discovery/ec2/Ec2DiscoveryTests.java | 2 +- .../org/opensearch/discovery/ec2/Ec2RetriesTests.java | 2 +- .../opensearch/discovery/gce/GceSeedHostsProvider.java | 2 +- .../opensearch/discovery/gce/GceDiscoveryTests.java | 2 +- .../java/org/opensearch/http/nio/NioPipeliningIT.java | 2 +- .../http/nio/NioHttpServerTransportTests.java | 2 +- .../transport/nio/SimpleNioTransportTests.java | 2 +- .../TransportReplicationActionRetryOnClosedNodeIT.java | 2 +- .../action/ActionListenerResponseHandler.java | 2 +- .../java/org/opensearch/action/ActionResponse.java | 2 +- .../action/admin/cluster/stats/ClusterStatsNodes.java | 2 +- .../action/search/ClearScrollController.java | 2 +- .../org/opensearch/action/search/DeletePitInfo.java | 2 +- .../action/search/SearchTransportService.java | 2 +- .../action/search/UpdatePitContextResponse.java | 2 +- .../action/support/ChannelActionListener.java | 2 +- .../support/broadcast/BroadcastShardResponse.java | 2 +- .../broadcast/node/TransportBroadcastByNodeAction.java | 2 +- .../action/support/nodes/BaseNodeResponse.java | 2 +- .../action/support/tasks/TransportTasksAction.java | 2 +- .../main/java/org/opensearch/bootstrap/Bootstrap.java | 2 +- .../java/org/opensearch/bootstrap/BootstrapChecks.java | 4 ++-- .../opensearch/cluster/ClusterSettingsResponse.java | 2 +- .../cluster/action/index/NodeMappingRefreshAction.java | 2 +- .../cluster/action/shard/ShardStateAction.java | 2 +- .../coordination/ClusterFormationFailureHelper.java | 2 +- 
.../opensearch/cluster/coordination/Coordinator.java | 4 ++-- .../cluster/coordination/FollowersChecker.java | 2 +- .../opensearch/cluster/coordination/JoinHelper.java | 4 ++-- .../opensearch/cluster/coordination/LeaderChecker.java | 4 ++-- .../opensearch/cluster/coordination/PeersResponse.java | 2 +- .../cluster/coordination/PreVoteResponse.java | 2 +- .../opensearch/cluster/coordination/Publication.java | 2 +- .../coordination/PublicationTransportHandler.java | 2 +- .../cluster/coordination/PublishWithJoinResponse.java | 2 +- .../org/opensearch/cluster/node/DiscoveryNode.java | 2 +- .../opensearch/cluster/node/DiscoveryNodeFilters.java | 2 +- .../org/opensearch/cluster/node/DiscoveryNodes.java | 2 +- .../java/org/opensearch/discovery/DiscoveryModule.java | 2 +- .../discovery/FileBasedSeedHostsProvider.java | 2 +- .../HandshakingTransportAddressConnector.java | 2 +- .../discovery/InitializeExtensionResponse.java | 2 +- .../main/java/org/opensearch/discovery/PeerFinder.java | 2 +- .../org/opensearch/discovery/SeedHostsProvider.java | 2 +- .../org/opensearch/discovery/SeedHostsResolver.java | 2 +- .../discovery/SettingsBasedSeedHostsProvider.java | 2 +- .../opensearch/env/EnvironmentSettingsResponse.java | 2 +- .../opensearch/extensions/AcknowledgedResponse.java | 2 +- .../AddSettingsUpdateConsumerRequestHandler.java | 2 +- .../opensearch/extensions/DiscoveryExtensionNode.java | 2 +- .../extensions/ExtensionDependencyResponse.java | 2 +- .../org/opensearch/extensions/ExtensionsManager.java | 4 ++-- .../action/ExtensionTransportActionsHandler.java | 2 +- .../extensions/rest/RestActionsRequestHandler.java | 2 +- .../rest/RestExecuteOnExtensionResponse.java | 2 +- .../settings/CustomSettingsRequestHandler.java | 2 +- .../gateway/LocalAllocateDangledIndices.java | 2 +- .../opensearch/http/AbstractHttpServerTransport.java | 4 ++-- server/src/main/java/org/opensearch/http/HttpInfo.java | 4 ++-- .../java/org/opensearch/http/HttpServerTransport.java | 2 +- .../org/opensearch/index/IndicesModuleResponse.java | 2 +- .../indices/recovery/PeerRecoveryTargetService.java | 2 +- .../opensearch/indices/recovery/RecoveryResponse.java | 2 +- .../recovery/RecoveryTranslogOperationsResponse.java | 2 +- .../indices/recovery/RemoteRecoveryTargetHandler.java | 2 +- .../indices/recovery/RetryableTransportClient.java | 2 +- .../indices/replication/CheckpointInfoResponse.java | 2 +- .../indices/replication/GetSegmentFilesResponse.java | 2 +- .../replication/RemoteSegmentFileChunkWriter.java | 2 +- .../replication/SegmentReplicationSourceService.java | 2 +- .../replication/SegmentReplicationTargetService.java | 2 +- .../indices/replication/common/ReplicationTarget.java | 2 +- .../org/opensearch/indices/store/IndicesStore.java | 2 +- server/src/main/java/org/opensearch/node/Node.java | 4 ++-- .../org/opensearch/node/NodeValidationException.java | 2 +- .../java/org/opensearch/plugins/DiscoveryPlugin.java | 3 ++- .../repositories/VerifyNodeRepositoryAction.java | 2 +- .../opensearch/rest/action/cat/RestNodesAction.java | 2 +- .../java/org/opensearch/search/SearchPhaseResult.java | 2 +- .../org/opensearch/tasks/TaskCancellationService.java | 2 +- .../opensearch/transport/ActionTransportException.java | 2 +- .../transport/EmptyTransportResponseHandler.java | 1 + .../java/org/opensearch/transport/InboundHandler.java | 3 ++- .../java/org/opensearch/transport/OutboundHandler.java | 3 ++- .../org/opensearch/transport/PlainTransportFuture.java | 1 + .../opensearch/transport/ProxyConnectionStrategy.java | 2 +- 
.../opensearch/transport/RemoteTransportException.java | 2 +- .../opensearch/transport/SniffConnectionStrategy.java | 2 +- .../org/opensearch/transport/TaskTransportChannel.java | 1 + .../java/org/opensearch/transport/TcpTransport.java | 4 ++-- .../org/opensearch/transport/TcpTransportChannel.java | 1 + .../main/java/org/opensearch/transport/Transport.java | 5 +++-- .../org/opensearch/transport/TransportActionProxy.java | 1 + .../org/opensearch/transport/TransportChannel.java | 1 + .../org/opensearch/transport/TransportHandshaker.java | 1 + .../java/org/opensearch/transport/TransportInfo.java | 4 ++-- .../org/opensearch/transport/TransportInterceptor.java | 1 + .../opensearch/transport/TransportMessageListener.java | 1 + .../org/opensearch/transport/TransportRequest.java | 1 + .../opensearch/transport/TransportResponseHandler.java | 1 + .../org/opensearch/transport/TransportService.java | 5 +++-- .../org/opensearch/ExceptionSerializationTests.java | 2 +- .../admin/cluster/node/tasks/TaskManagerTestCase.java | 2 +- .../admin/cluster/node/tasks/TestTaskPlugin.java | 2 +- .../cluster/reroute/ClusterRerouteResponseTests.java | 2 +- .../shards/ClusterSearchShardsResponseTests.java | 2 +- .../TransportVerifyShardBeforeCloseActionTests.java | 2 +- .../action/bulk/TransportShardBulkActionTests.java | 2 +- .../action/search/ClearScrollControllerTests.java | 2 +- .../action/search/TransportSearchActionTests.java | 2 +- .../node/TransportBroadcastByNodeActionTests.java | 2 +- .../support/replication/ReplicationOperationTests.java | 2 +- .../replication/TransportReplicationActionTests.java | 2 +- .../TransportWriteActionForIndexingPressureTests.java | 2 +- .../support/replication/TransportWriteActionTests.java | 2 +- .../org/opensearch/bootstrap/BootstrapChecksTests.java | 4 ++-- .../java/org/opensearch/cluster/ClusterStateTests.java | 2 +- .../cluster/NodeConnectionsServiceTests.java | 4 ++-- .../cluster/action/shard/ShardStateActionTests.java | 2 +- .../ClusterFormationFailureHelperTests.java | 2 +- .../cluster/coordination/CoordinationStateTests.java | 2 +- .../cluster/coordination/FollowersCheckerTests.java | 4 ++-- .../cluster/coordination/JoinHelperTests.java | 2 +- .../cluster/coordination/LeaderCheckerTests.java | 4 ++-- .../opensearch/cluster/coordination/NodeJoinTests.java | 2 +- .../cluster/coordination/PublicationTests.java | 2 +- .../cluster/node/DiscoveryNodeFiltersTests.java | 2 +- .../opensearch/cluster/node/DiscoveryNodeTests.java | 2 +- .../opensearch/cluster/node/DiscoveryNodesTests.java | 2 +- .../opensearch/cluster/routing/RoutingNodeTests.java | 2 +- .../opensearch/common/network/NetworkModuleTests.java | 2 +- .../common/transport/BoundTransportAddressTests.java | 2 ++ .../discovery/FileBasedSeedHostsProviderTests.java | 4 ++-- .../HandshakingTransportAddressConnectorTests.java | 2 +- .../discovery/InitializeExtensionRequestTests.java | 2 +- .../java/org/opensearch/discovery/PeerFinderTests.java | 2 +- .../opensearch/discovery/SeedHostsResolverTests.java | 4 ++-- .../discovery/SettingsBasedSeedHostsProviderTests.java | 2 +- .../extensions/DiscoveryExtensionNodeTests.java | 2 +- .../opensearch/extensions/ExtensionsManagerTests.java | 4 ++-- .../action/ExtensionTransportActionsHandlerTests.java | 2 +- .../rest/RestSendToExtensionActionTests.java | 2 +- .../org/opensearch/gateway/GatewayServiceTests.java | 2 +- .../http/AbstractHttpServerTransportTests.java | 2 +- .../test/java/org/opensearch/http/HttpInfoTests.java | 4 ++-- .../SegmentReplicationSourceServiceTests.java | 2 +- 
.../SegmentReplicationTargetServiceTests.java | 2 +- .../src/test/java/org/opensearch/node/NodeTests.java | 2 +- .../opensearch/node/ResponseCollectorServiceTests.java | 2 +- .../opensearch/nodesinfo/NodeInfoStreamingTests.java | 4 ++-- .../org/opensearch/rest/BytesRestResponseTests.java | 2 +- .../java/org/opensearch/rest/RestControllerTests.java | 4 ++-- .../rest/action/RestBuilderListenerTests.java | 4 ++-- .../opensearch/snapshots/SnapshotResiliencyTests.java | 2 +- .../transport/ClusterConnectionManagerTests.java | 2 +- .../org/opensearch/transport/InboundDecoderTests.java | 1 + .../org/opensearch/transport/OutboundHandlerTests.java | 3 ++- .../transport/ProxyConnectionStrategyTests.java | 2 +- .../transport/RemoteClusterConnectionTests.java | 2 +- .../transport/RemoteConnectionManagerTests.java | 2 +- .../transport/SniffConnectionStrategyTests.java | 2 +- .../org/opensearch/transport/TcpTransportTests.java | 2 +- .../transport/TransportActionProxyTests.java | 1 + .../opensearch/transport/TransportHandshakerTests.java | 1 + .../org/opensearch/transport/TransportInfoTests.java | 4 ++-- .../TransportServiceDeserializationFailureTests.java | 1 + .../coordination/AbstractCoordinatorTestCase.java | 2 +- .../src/main/java/org/opensearch/node/MockNode.java | 2 +- .../java/org/opensearch/test/ExternalTestCluster.java | 2 +- .../java/org/opensearch/test/MockHttpTransport.java | 4 ++-- .../org/opensearch/test/OpenSearchIntegTestCase.java | 2 +- .../java/org/opensearch/test/OpenSearchTestCase.java | 2 +- .../test/disruption/DisruptableMockTransport.java | 6 +++--- .../org/opensearch/test/transport/FakeTransport.java | 4 ++-- .../org/opensearch/test/transport/MockTransport.java | 4 ++-- .../test/transport/MockTransportService.java | 4 ++-- .../test/transport/StubbableConnectionManager.java | 2 +- .../opensearch/test/transport/StubbableTransport.java | 4 ++-- .../transport/AbstractSimpleTransportTestCase.java | 5 +++-- .../java/org/opensearch/transport/TestResponse.java | 1 + .../org/opensearch/transport/TestTransportChannel.java | 1 + .../test/disruption/DisruptableMockTransportTests.java | 6 +++--- .../test/disruption/NetworkDisruptionIT.java | 2 +- .../transport/nio/SimpleMockNioTransportTests.java | 2 +- 217 files changed, 283 insertions(+), 227 deletions(-) rename {server => libs/common}/src/main/java/org/opensearch/common/network/Cidrs.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/network/InetAddresses.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/network/NetworkAddress.java (100%) create mode 100644 libs/common/src/main/java/org/opensearch/common/network/package-info.java rename {server => libs/common}/src/main/java/org/opensearch/common/transport/NetworkExceptionHelper.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/transport/PortsRange.java (100%) rename {server => libs/common}/src/main/java/org/opensearch/common/transport/package-info.java (79%) rename {server => libs/common}/src/test/java/org/opensearch/common/network/CidrsTests.java (100%) rename {server => libs/common}/src/test/java/org/opensearch/common/network/InetAddressesTests.java (100%) rename {server => libs/common}/src/test/java/org/opensearch/common/network/NetworkAddressTests.java (100%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/common/transport/BoundTransportAddress.java (98%) rename {server/src/main/java/org/opensearch => 
libs/core/src/main/java/org/opensearch/core}/common/transport/TransportAddress.java (99%) create mode 100644 libs/core/src/main/java/org/opensearch/core/common/transport/package-info.java rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/transport/TransportMessage.java (94%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/transport/TransportResponse.java (98%) create mode 100644 libs/core/src/main/java/org/opensearch/core/transport/package-info.java diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/routing/allocation/Allocators.java b/benchmarks/src/main/java/org/opensearch/benchmark/routing/allocation/Allocators.java index d700b9dab2cf3..25a7bf3d98f0a 100644 --- a/benchmarks/src/main/java/org/opensearch/benchmark/routing/allocation/Allocators.java +++ b/benchmarks/src/main/java/org/opensearch/benchmark/routing/allocation/Allocators.java @@ -45,7 +45,7 @@ import org.opensearch.cluster.routing.allocation.decider.AllocationDeciders; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.set.Sets; import org.opensearch.gateway.GatewayAllocator; import org.opensearch.snapshots.EmptySnapshotsInfoService; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java index e6411b615df07..0e9bce7ce0b58 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/tasks/CancelTasksResponseTests.java @@ -40,7 +40,7 @@ import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/main/java/org/opensearch/common/network/Cidrs.java b/libs/common/src/main/java/org/opensearch/common/network/Cidrs.java similarity index 100% rename from server/src/main/java/org/opensearch/common/network/Cidrs.java rename to libs/common/src/main/java/org/opensearch/common/network/Cidrs.java diff --git a/server/src/main/java/org/opensearch/common/network/InetAddresses.java b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java similarity index 100% rename from server/src/main/java/org/opensearch/common/network/InetAddresses.java rename to libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java diff --git a/server/src/main/java/org/opensearch/common/network/NetworkAddress.java b/libs/common/src/main/java/org/opensearch/common/network/NetworkAddress.java similarity index 100% rename from server/src/main/java/org/opensearch/common/network/NetworkAddress.java rename to libs/common/src/main/java/org/opensearch/common/network/NetworkAddress.java diff --git a/libs/common/src/main/java/org/opensearch/common/network/package-info.java b/libs/common/src/main/java/org/opensearch/common/network/package-info.java new file mode 100644 index 
0000000000000..92e4eac5bde42 --- /dev/null +++ b/libs/common/src/main/java/org/opensearch/common/network/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** common network layer classes used across the code base */ +package org.opensearch.common.network; diff --git a/server/src/main/java/org/opensearch/common/transport/NetworkExceptionHelper.java b/libs/common/src/main/java/org/opensearch/common/transport/NetworkExceptionHelper.java similarity index 100% rename from server/src/main/java/org/opensearch/common/transport/NetworkExceptionHelper.java rename to libs/common/src/main/java/org/opensearch/common/transport/NetworkExceptionHelper.java diff --git a/server/src/main/java/org/opensearch/common/transport/PortsRange.java b/libs/common/src/main/java/org/opensearch/common/transport/PortsRange.java similarity index 100% rename from server/src/main/java/org/opensearch/common/transport/PortsRange.java rename to libs/common/src/main/java/org/opensearch/common/transport/PortsRange.java diff --git a/server/src/main/java/org/opensearch/common/transport/package-info.java b/libs/common/src/main/java/org/opensearch/common/transport/package-info.java similarity index 79% rename from server/src/main/java/org/opensearch/common/transport/package-info.java rename to libs/common/src/main/java/org/opensearch/common/transport/package-info.java index abb8dfbb4e4f0..7d28ac6c60a14 100644 --- a/server/src/main/java/org/opensearch/common/transport/package-info.java +++ b/libs/common/src/main/java/org/opensearch/common/transport/package-info.java @@ -6,5 +6,5 @@ * compatible open source license. */ -/** Base Transport utility package. 
*/ +/** common transport layer classes used across the code base */ package org.opensearch.common.transport; diff --git a/server/src/test/java/org/opensearch/common/network/CidrsTests.java b/libs/common/src/test/java/org/opensearch/common/network/CidrsTests.java similarity index 100% rename from server/src/test/java/org/opensearch/common/network/CidrsTests.java rename to libs/common/src/test/java/org/opensearch/common/network/CidrsTests.java diff --git a/server/src/test/java/org/opensearch/common/network/InetAddressesTests.java b/libs/common/src/test/java/org/opensearch/common/network/InetAddressesTests.java similarity index 100% rename from server/src/test/java/org/opensearch/common/network/InetAddressesTests.java rename to libs/common/src/test/java/org/opensearch/common/network/InetAddressesTests.java diff --git a/server/src/test/java/org/opensearch/common/network/NetworkAddressTests.java b/libs/common/src/test/java/org/opensearch/common/network/NetworkAddressTests.java similarity index 100% rename from server/src/test/java/org/opensearch/common/network/NetworkAddressTests.java rename to libs/common/src/test/java/org/opensearch/common/network/NetworkAddressTests.java diff --git a/server/src/main/java/org/opensearch/common/transport/BoundTransportAddress.java b/libs/core/src/main/java/org/opensearch/core/common/transport/BoundTransportAddress.java similarity index 98% rename from server/src/main/java/org/opensearch/common/transport/BoundTransportAddress.java rename to libs/core/src/main/java/org/opensearch/core/common/transport/BoundTransportAddress.java index 3a9c337f2d950..8908a172395f2 100644 --- a/server/src/main/java/org/opensearch/common/transport/BoundTransportAddress.java +++ b/libs/core/src/main/java/org/opensearch/core/common/transport/BoundTransportAddress.java @@ -30,12 +30,12 @@ * GitHub history for details. */ -package org.opensearch.common.transport; +package org.opensearch.core.common.transport; +import org.opensearch.common.network.InetAddresses; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.network.InetAddresses; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/common/transport/TransportAddress.java b/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java similarity index 99% rename from server/src/main/java/org/opensearch/common/transport/TransportAddress.java rename to libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java index 737e8f3496143..1a853877ed0b9 100644 --- a/server/src/main/java/org/opensearch/common/transport/TransportAddress.java +++ b/libs/core/src/main/java/org/opensearch/core/common/transport/TransportAddress.java @@ -30,12 +30,12 @@ * GitHub history for details. 
*/ -package org.opensearch.common.transport; +package org.opensearch.core.common.transport; +import org.opensearch.common.network.NetworkAddress; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.network.NetworkAddress; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/libs/core/src/main/java/org/opensearch/core/common/transport/package-info.java b/libs/core/src/main/java/org/opensearch/core/common/transport/package-info.java new file mode 100644 index 0000000000000..21d2abfce958a --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/core/common/transport/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Common / Base Transport classes used to implement the OpenSearch transport layer */ +package org.opensearch.core.common.transport; diff --git a/server/src/main/java/org/opensearch/transport/TransportMessage.java b/libs/core/src/main/java/org/opensearch/core/transport/TransportMessage.java similarity index 94% rename from server/src/main/java/org/opensearch/transport/TransportMessage.java rename to libs/core/src/main/java/org/opensearch/core/transport/TransportMessage.java index 78216047d530e..941babda40aa3 100644 --- a/server/src/main/java/org/opensearch/transport/TransportMessage.java +++ b/libs/core/src/main/java/org/opensearch/core/transport/TransportMessage.java @@ -30,11 +30,11 @@ * GitHub history for details. */ -package org.opensearch.transport; +package org.opensearch.core.transport; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; /** * Message over the transport interface diff --git a/server/src/main/java/org/opensearch/transport/TransportResponse.java b/libs/core/src/main/java/org/opensearch/core/transport/TransportResponse.java similarity index 98% rename from server/src/main/java/org/opensearch/transport/TransportResponse.java rename to libs/core/src/main/java/org/opensearch/core/transport/TransportResponse.java index 73713fa1447a8..038069e93a51b 100644 --- a/server/src/main/java/org/opensearch/transport/TransportResponse.java +++ b/libs/core/src/main/java/org/opensearch/core/transport/TransportResponse.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.transport; +package org.opensearch.core.transport; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; diff --git a/libs/core/src/main/java/org/opensearch/core/transport/package-info.java b/libs/core/src/main/java/org/opensearch/core/transport/package-info.java new file mode 100644 index 0000000000000..91db839f40305 --- /dev/null +++ b/libs/core/src/main/java/org/opensearch/core/transport/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/** Core Transport Layer classes used across the OpenSearch core */ +package org.opensearch.core.transport; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java index 97f43b9439408..03d6fafccfea3 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -50,7 +50,7 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java index e239018e0ce31..89eb8fc7e15a3 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java @@ -42,7 +42,7 @@ import org.opensearch.client.Client; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.http.HttpInfo; import org.opensearch.index.query.QueryBuilders; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java index c066f3edf6900..baa306aa0624b 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4Http2IT.java @@ -13,7 +13,7 @@ import io.netty.util.ReferenceCounted; import org.opensearch.OpenSearchNetty4IntegTestCase; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.http.HttpServerTransport; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java index 95440e28686e7..d01f72ac88c9d 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4HttpRequestSizeLimitIT.java @@ -38,7 +38,7 @@ import org.opensearch.OpenSearchNetty4IntegTestCase; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import 
org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.http.HttpServerTransport; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java index 96193b0ecb954..d891284f53205 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/http/netty4/Netty4PipeliningIT.java @@ -35,7 +35,7 @@ import io.netty.handler.codec.http.FullHttpResponse; import io.netty.util.ReferenceCounted; import org.opensearch.OpenSearchNetty4IntegTestCase; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.http.HttpServerTransport; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java index 3ff3938d23f65..4004d3d1a029d 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportMultiPortIntegrationIT.java @@ -36,8 +36,8 @@ import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; import org.opensearch.test.junit.annotations.Network; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java index e6604abf126da..4722cdb66be18 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/transport/netty4/Netty4TransportPublishAddressIT.java @@ -38,8 +38,8 @@ import org.opensearch.common.network.NetworkModule; import org.opensearch.common.network.NetworkUtils; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.transport.Netty4ModulePlugin; import org.opensearch.transport.TransportInfo; diff --git 
a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java index 5fcfc4ee5e151..1b175d7991e32 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4BadRequestTests.java @@ -38,7 +38,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java index 6c8cf69afb148..ca8dfb616e313 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerPipeliningTests.java @@ -45,7 +45,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.http.HttpPipelinedRequest; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java index d23edfda829f9..af4ded2255c9c 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpServerTransportTests.java @@ -64,7 +64,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.MockBigArrays; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index 5c1c5970a7cfb..db7347bf99345 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -36,7 +36,7 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; 
import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java index ee2bf12a4246f..27bfaef15ca86 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java @@ -39,7 +39,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.util.net.NetUtils; diff --git a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java index f95f358532bac..d5d6aae23f344 100644 --- a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java +++ b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java @@ -47,7 +47,7 @@ import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.discovery.azure.classic.AzureSeedHostsProvider; import org.opensearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin; import org.opensearch.plugins.Plugin; diff --git a/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java b/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java index e2bc180876a17..89970fdfc322e 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/opensearch/discovery/azure/classic/AzureSeedHostsProvider.java @@ -48,7 +48,7 @@ import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.Strings; import org.opensearch.discovery.SeedHostsProvider; diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java index 4afdff7d2c272..0724dbe4543b8 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java +++ 
b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java @@ -47,7 +47,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.SingleObjectCache; import org.opensearch.discovery.SeedHostsProvider; diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java index afa35f63ae4dc..07400f2126fe5 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryTests.java @@ -44,7 +44,7 @@ import org.opensearch.common.io.Streams; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.transport.MockTransportService; diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java index ea10d03576d94..0f7a86bf76622 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2RetriesTests.java @@ -41,7 +41,7 @@ import org.opensearch.common.io.Streams; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.discovery.SeedHostsProvider; import org.opensearch.discovery.SeedHostsResolver; diff --git a/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java b/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java index dfd60f52730a6..3295273c83598 100644 --- a/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/opensearch/discovery/gce/GceSeedHostsProvider.java @@ -46,7 +46,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.Strings; import org.opensearch.discovery.SeedHostsProvider; diff --git a/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java index 2ca1234bb8a04..c63085deb466f 100644 --- a/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java +++ 
b/plugins/discovery-gce/src/test/java/org/opensearch/discovery/gce/GceDiscoveryTests.java @@ -37,7 +37,7 @@ import org.opensearch.cloud.gce.GceMetadataService; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.transport.MockTransportService; import org.opensearch.threadpool.TestThreadPool; diff --git a/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java b/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java index ac06bf03ed8cd..9afb8e37cd9a9 100644 --- a/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java +++ b/plugins/transport-nio/src/internalClusterTest/java/org/opensearch/http/nio/NioPipeliningIT.java @@ -34,7 +34,7 @@ import io.netty.handler.codec.http.FullHttpResponse; import org.opensearch.NioIntegTestCase; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.http.HttpServerTransport; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java index 4d0db18d433ec..22bda4881c322 100644 --- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpServerTransportTests.java @@ -53,7 +53,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.MockBigArrays; diff --git a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java index d0e779edded7d..d7f603031ac17 100644 --- a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java @@ -39,7 +39,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.util.net.NetUtils; diff --git a/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java b/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java index 9f60e65eca297..7d663dd70edd6 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java @@ -60,7 +60,7 @@ import org.opensearch.transport.TransportInterceptor; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java b/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java index 7899324a3301e..2f376d81fa202 100644 --- a/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java +++ b/server/src/main/java/org/opensearch/action/ActionListenerResponseHandler.java @@ -37,7 +37,7 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportException; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; import java.util.Objects; diff --git a/server/src/main/java/org/opensearch/action/ActionResponse.java b/server/src/main/java/org/opensearch/action/ActionResponse.java index fd13971433d8b..e1d4da760b35b 100644 --- a/server/src/main/java/org/opensearch/action/ActionResponse.java +++ b/server/src/main/java/org/opensearch/action/ActionResponse.java @@ -33,7 +33,7 @@ package org.opensearch.action; import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java index bddb3fb746eb1..6dd7e09aeae0d 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -41,7 +41,7 @@ import org.opensearch.common.metrics.OperationStats; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.Strings; diff --git a/server/src/main/java/org/opensearch/action/search/ClearScrollController.java b/server/src/main/java/org/opensearch/action/search/ClearScrollController.java index eb0fa49a94050..c258b111fa1c6 100644 --- a/server/src/main/java/org/opensearch/action/search/ClearScrollController.java +++ b/server/src/main/java/org/opensearch/action/search/ClearScrollController.java @@ -41,7 +41,7 @@ import org.opensearch.common.util.concurrent.CountDown; import org.opensearch.core.common.Strings; import org.opensearch.transport.Transport; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.util.ArrayList; import java.util.Collection; diff --git 
a/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java b/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java index 1e616ab5ca16e..b33f8a46c8f7a 100644 --- a/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java +++ b/server/src/main/java/org/opensearch/action/search/DeletePitInfo.java @@ -15,7 +15,7 @@ import org.opensearch.core.xcontent.ConstructingObjectParser; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java index 5a280818640ed..37ffca6cac5f2 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/opensearch/action/search/SearchTransportService.java @@ -65,7 +65,7 @@ import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportService; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java b/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java index da39aed20ef8e..1db8fc48c28bc 100644 --- a/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java +++ b/server/src/main/java/org/opensearch/action/search/UpdatePitContextResponse.java @@ -10,7 +10,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java b/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java index 5b0475093d3c2..07851345241bd 100644 --- a/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java +++ b/server/src/main/java/org/opensearch/action/support/ChannelActionListener.java @@ -35,7 +35,7 @@ import org.opensearch.action.ActionListener; import org.opensearch.transport.TransportChannel; import org.opensearch.transport.TransportRequest; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; /** * Listener for transport channel actions diff --git a/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java b/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java index 39c524448bc5d..9603f886366f2 100644 --- a/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java +++ b/server/src/main/java/org/opensearch/action/support/broadcast/BroadcastShardResponse.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; diff --git 
a/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index bf71134ab7b88..1b7822ee5a440 100644 --- a/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -63,7 +63,7 @@ import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestHandler; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; import org.opensearch.transport.TransportRequestOptions; diff --git a/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java b/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java index 4a94f790c3443..8a4e12567b515 100644 --- a/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java +++ b/server/src/main/java/org/opensearch/action/support/nodes/BaseNodeResponse.java @@ -35,7 +35,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java index bfd207e6f969f..e06858ab1a201 100644 --- a/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/opensearch/action/support/tasks/TransportTasksAction.java @@ -57,7 +57,7 @@ import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestHandler; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java b/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java index 2a23b501a8a0e..9383260d1bd73 100644 --- a/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java @@ -55,7 +55,7 @@ import org.opensearch.common.settings.SecureSettings; import org.opensearch.core.common.settings.SecureString; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; import org.opensearch.common.util.io.IOUtils; import org.opensearch.env.Environment; import org.opensearch.monitor.jvm.JvmInfo; diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java index c27c149947444..f9661e71d60e6 100644 --- a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java @@ -42,8 +42,8 @@ import 
org.opensearch.common.SuppressForbidden; import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Setting; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.discovery.DiscoveryModule; import org.opensearch.env.Environment; import org.opensearch.index.IndexModule; diff --git a/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java b/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java index ee5c8c00dfaf4..408344f476fe2 100644 --- a/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java +++ b/server/src/main/java/org/opensearch/cluster/ClusterSettingsResponse.java @@ -12,7 +12,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; import java.util.Objects; diff --git a/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java b/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java index e6781fc22e1a7..47e4e59dadd3f 100644 --- a/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java +++ b/server/src/main/java/org/opensearch/cluster/action/index/NodeMappingRefreshAction.java @@ -48,7 +48,7 @@ import org.opensearch.transport.TransportChannel; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestHandler; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportService; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java index 9d9b6c52f6b25..30dcb5fd08954 100644 --- a/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/opensearch/cluster/action/shard/ShardStateAction.java @@ -74,7 +74,7 @@ import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestDeduplicator; import org.opensearch.transport.TransportRequestHandler; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportService; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java index 9b51e56dce966..05a5ac862a5b1 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -40,7 +40,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.gateway.GatewayMetaState; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java index 1c38e68c43466..b57da128ce852 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java @@ -67,7 +67,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.concurrent.ListenableFuture; @@ -86,7 +86,7 @@ import org.opensearch.monitor.StatusInfo; import org.opensearch.threadpool.Scheduler; import org.opensearch.threadpool.ThreadPool.Names; -import org.opensearch.transport.TransportResponse.Empty; +import org.opensearch.core.transport.TransportResponse.Empty; import org.opensearch.transport.TransportService; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java b/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java index 08008152cfcd6..94d2e11ab591e 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/FollowersChecker.java @@ -55,7 +55,7 @@ import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestOptions; import org.opensearch.transport.TransportRequestOptions.Type; -import org.opensearch.transport.TransportResponse.Empty; +import org.opensearch.core.transport.TransportResponse.Empty; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java index 42f09f95a7f56..f923176efa5e5 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/JoinHelper.java @@ -67,8 +67,8 @@ import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; -import org.opensearch.transport.TransportResponse.Empty; +import org.opensearch.core.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse.Empty; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java b/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java index f43abf0080575..009a2121a5886 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/LeaderChecker.java @@ -56,8 +56,8 @@ import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestOptions; import org.opensearch.transport.TransportRequestOptions.Type; -import 
org.opensearch.transport.TransportResponse; -import org.opensearch.transport.TransportResponse.Empty; +import org.opensearch.core.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse.Empty; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java b/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java index b68f689ef63fd..8a70c71d53fdd 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/PeersResponse.java @@ -35,7 +35,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; import java.util.List; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java b/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java index c8186441db449..9c683f7de0878 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/PreVoteResponse.java @@ -34,7 +34,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; import java.util.Objects; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Publication.java b/server/src/main/java/org/opensearch/cluster/coordination/Publication.java index 429890e7420de..6ffca828ecb06 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/Publication.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/Publication.java @@ -43,7 +43,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.unit.TimeValue; import org.opensearch.transport.TransportException; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.util.ArrayList; import java.util.List; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java index 60c931a601561..64c3b93e0e0be 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/PublicationTransportHandler.java @@ -51,7 +51,7 @@ import org.opensearch.transport.TransportChannel; import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java b/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java index f99ba82be5514..f6350c5558a82 100644 --- 
a/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/PublishWithJoinResponse.java @@ -33,7 +33,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; import java.util.Optional; diff --git a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java index d6ba0199d193c..f44085a232127 100644 --- a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java +++ b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java @@ -39,7 +39,7 @@ import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.node.Node; diff --git a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodeFilters.java b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodeFilters.java index 4fd2905495961..2f0fb120311d9 100644 --- a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodeFilters.java +++ b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodeFilters.java @@ -37,7 +37,7 @@ import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.Strings; import java.util.HashMap; diff --git a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodes.java index 060c7c5eb8d1a..e84bbc7a203f2 100644 --- a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodes.java +++ b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNodes.java @@ -40,7 +40,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.regex.Regex; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.set.Sets; import org.opensearch.core.common.Strings; diff --git a/server/src/main/java/org/opensearch/discovery/DiscoveryModule.java b/server/src/main/java/org/opensearch/discovery/DiscoveryModule.java index cf2f7b47288fd..13d9c6d97e079 100644 --- a/server/src/main/java/org/opensearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/opensearch/discovery/DiscoveryModule.java @@ -49,7 +49,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.gateway.GatewayMetaState; import org.opensearch.monitor.NodeHealthService; import org.opensearch.plugins.DiscoveryPlugin; diff --git 
a/server/src/main/java/org/opensearch/discovery/FileBasedSeedHostsProvider.java b/server/src/main/java/org/opensearch/discovery/FileBasedSeedHostsProvider.java
index 94f36ba0a546e..3159733336057 100644
--- a/server/src/main/java/org/opensearch/discovery/FileBasedSeedHostsProvider.java
+++ b/server/src/main/java/org/opensearch/discovery/FileBasedSeedHostsProvider.java
@@ -35,7 +35,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import java.io.IOException;
 import java.nio.file.Files;
diff --git a/server/src/main/java/org/opensearch/discovery/HandshakingTransportAddressConnector.java b/server/src/main/java/org/opensearch/discovery/HandshakingTransportAddressConnector.java
index 90ca19e9369f4..80ce094785755 100644
--- a/server/src/main/java/org/opensearch/discovery/HandshakingTransportAddressConnector.java
+++ b/server/src/main/java/org/opensearch/discovery/HandshakingTransportAddressConnector.java
@@ -43,7 +43,7 @@
 import org.opensearch.common.UUIDs;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
 import org.opensearch.common.util.io.IOUtils;
diff --git a/server/src/main/java/org/opensearch/discovery/InitializeExtensionResponse.java b/server/src/main/java/org/opensearch/discovery/InitializeExtensionResponse.java
index f56ffc84a7909..33ca1edcf3330 100644
--- a/server/src/main/java/org/opensearch/discovery/InitializeExtensionResponse.java
+++ b/server/src/main/java/org/opensearch/discovery/InitializeExtensionResponse.java
@@ -34,7 +34,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.Arrays;
diff --git a/server/src/main/java/org/opensearch/discovery/PeerFinder.java b/server/src/main/java/org/opensearch/discovery/PeerFinder.java
index 96556d1cd71ed..f470342d826f8 100644
--- a/server/src/main/java/org/opensearch/discovery/PeerFinder.java
+++ b/server/src/main/java/org/opensearch/discovery/PeerFinder.java
@@ -44,7 +44,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
 import org.opensearch.threadpool.ThreadPool.Names;
diff --git a/server/src/main/java/org/opensearch/discovery/SeedHostsProvider.java b/server/src/main/java/org/opensearch/discovery/SeedHostsProvider.java
index 1a4b5a3182dbe..89dfd9310e895 100644
--- a/server/src/main/java/org/opensearch/discovery/SeedHostsProvider.java
+++ b/server/src/main/java/org/opensearch/discovery/SeedHostsProvider.java
@@ -32,7 +32,7 @@
 package org.opensearch.discovery;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java b/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java
index cef7853011b82..43b685052c9a8 100644
--- a/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java
+++ b/server/src/main/java/org/opensearch/discovery/SeedHostsResolver.java
@@ -38,7 +38,7 @@
 import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.CancellableThreads;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
diff --git a/server/src/main/java/org/opensearch/discovery/SettingsBasedSeedHostsProvider.java b/server/src/main/java/org/opensearch/discovery/SettingsBasedSeedHostsProvider.java
index 2dcd819e727f2..9785d5b21078e 100644
--- a/server/src/main/java/org/opensearch/discovery/SettingsBasedSeedHostsProvider.java
+++ b/server/src/main/java/org/opensearch/discovery/SettingsBasedSeedHostsProvider.java
@@ -37,7 +37,7 @@
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Setting.Property;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.transport.TransportService;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/env/EnvironmentSettingsResponse.java b/server/src/main/java/org/opensearch/env/EnvironmentSettingsResponse.java
index ce3aa0556744b..1b87011ff8c75 100644
--- a/server/src/main/java/org/opensearch/env/EnvironmentSettingsResponse.java
+++ b/server/src/main/java/org/opensearch/env/EnvironmentSettingsResponse.java
@@ -11,7 +11,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/extensions/AcknowledgedResponse.java b/server/src/main/java/org/opensearch/extensions/AcknowledgedResponse.java
index 7c7e3e78798e8..32ad108b728b7 100644
--- a/server/src/main/java/org/opensearch/extensions/AcknowledgedResponse.java
+++ b/server/src/main/java/org/opensearch/extensions/AcknowledgedResponse.java
@@ -10,7 +10,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/extensions/AddSettingsUpdateConsumerRequestHandler.java b/server/src/main/java/org/opensearch/extensions/AddSettingsUpdateConsumerRequestHandler.java
index 67c56b7f458ff..4cac2f3b1562c 100644
--- a/server/src/main/java/org/opensearch/extensions/AddSettingsUpdateConsumerRequestHandler.java
+++ b/server/src/main/java/org/opensearch/extensions/AddSettingsUpdateConsumerRequestHandler.java
@@ -17,7 +17,7 @@
 import org.opensearch.common.settings.SettingsException;
 import org.opensearch.common.settings.SettingsModule;
 import org.opensearch.common.settings.WriteableSetting;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 /**
diff --git a/server/src/main/java/org/opensearch/extensions/DiscoveryExtensionNode.java b/server/src/main/java/org/opensearch/extensions/DiscoveryExtensionNode.java
index ac1dfe5309ffa..a888f99ff11ed 100644
--- a/server/src/main/java/org/opensearch/extensions/DiscoveryExtensionNode.java
+++ b/server/src/main/java/org/opensearch/extensions/DiscoveryExtensionNode.java
@@ -15,7 +15,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.xcontent.ToXContentFragment;
 import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/extensions/ExtensionDependencyResponse.java b/server/src/main/java/org/opensearch/extensions/ExtensionDependencyResponse.java
index d9531c0cc2894..5fb084caf89f3 100644
--- a/server/src/main/java/org/opensearch/extensions/ExtensionDependencyResponse.java
+++ b/server/src/main/java/org/opensearch/extensions/ExtensionDependencyResponse.java
@@ -15,7 +15,7 @@
 import java.util.Objects;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 /**
  * The response for getting the Extension Dependency.
diff --git a/server/src/main/java/org/opensearch/extensions/ExtensionsManager.java b/server/src/main/java/org/opensearch/extensions/ExtensionsManager.java
index 468de4238f879..c6ebe295976ce 100644
--- a/server/src/main/java/org/opensearch/extensions/ExtensionsManager.java
+++ b/server/src/main/java/org/opensearch/extensions/ExtensionsManager.java
@@ -32,7 +32,7 @@
 import org.opensearch.common.util.concurrent.AbstractRunnable;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.SettingsModule;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
@@ -52,7 +52,7 @@
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.ConnectTransportException;
 import org.opensearch.transport.TransportException;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportResponseHandler;
 import org.opensearch.transport.TransportService;
 import org.opensearch.env.EnvironmentSettingsResponse;
diff --git a/server/src/main/java/org/opensearch/extensions/action/ExtensionTransportActionsHandler.java b/server/src/main/java/org/opensearch/extensions/action/ExtensionTransportActionsHandler.java
index 22502509634c6..19fe43e9a6d61 100644
--- a/server/src/main/java/org/opensearch/extensions/action/ExtensionTransportActionsHandler.java
+++ b/server/src/main/java/org/opensearch/extensions/action/ExtensionTransportActionsHandler.java
@@ -22,7 +22,7 @@
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.ActionNotFoundTransportException;
 import org.opensearch.transport.TransportException;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportResponseHandler;
 import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/extensions/rest/RestActionsRequestHandler.java b/server/src/main/java/org/opensearch/extensions/rest/RestActionsRequestHandler.java
index d890c1b85bb81..563bfc5a5c9da 100644
--- a/server/src/main/java/org/opensearch/extensions/rest/RestActionsRequestHandler.java
+++ b/server/src/main/java/org/opensearch/extensions/rest/RestActionsRequestHandler.java
@@ -13,7 +13,7 @@
 import org.opensearch.extensions.DiscoveryExtensionNode;
 import org.opensearch.rest.RestController;
 import org.opensearch.rest.RestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.util.Map;
diff --git a/server/src/main/java/org/opensearch/extensions/rest/RestExecuteOnExtensionResponse.java b/server/src/main/java/org/opensearch/extensions/rest/RestExecuteOnExtensionResponse.java
index 63ae6ce93af22..c0c53f4e97b31 100644
--- a/server/src/main/java/org/opensearch/extensions/rest/RestExecuteOnExtensionResponse.java
+++ b/server/src/main/java/org/opensearch/extensions/rest/RestExecuteOnExtensionResponse.java
@@ -12,7 +12,7 @@
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.rest.RestResponse;
 import org.opensearch.core.rest.RestStatus;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/extensions/settings/CustomSettingsRequestHandler.java b/server/src/main/java/org/opensearch/extensions/settings/CustomSettingsRequestHandler.java
index 980dcf67c3128..a9070e77c2942 100644
--- a/server/src/main/java/org/opensearch/extensions/settings/CustomSettingsRequestHandler.java
+++ b/server/src/main/java/org/opensearch/extensions/settings/CustomSettingsRequestHandler.java
@@ -11,7 +11,7 @@
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.SettingsModule;
 import org.opensearch.extensions.AcknowledgedResponse;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.util.ArrayList;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/gateway/LocalAllocateDangledIndices.java b/server/src/main/java/org/opensearch/gateway/LocalAllocateDangledIndices.java
index 5ee369d6b9402..5d6843a9684e1 100644
--- a/server/src/main/java/org/opensearch/gateway/LocalAllocateDangledIndices.java
+++ b/server/src/main/java/org/opensearch/gateway/LocalAllocateDangledIndices.java
@@ -59,7 +59,7 @@
 import org.opensearch.transport.TransportChannel;
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java
index 0ba49be01d193..bedeedb9c5c6f 100644
--- a/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java
+++ b/server/src/main/java/org/opensearch/http/AbstractHttpServerTransport.java
@@ -43,10 +43,10 @@
 import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
 import org.opensearch.common.transport.NetworkExceptionHelper;
 import org.opensearch.common.transport.PortsRange;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.util.BigArrays;
 import org.opensearch.common.util.concurrent.ThreadContext;
diff --git a/server/src/main/java/org/opensearch/http/HttpInfo.java b/server/src/main/java/org/opensearch/http/HttpInfo.java
index 24c29b8dc7444..35eadc5a5de9e 100644
--- a/server/src/main/java/org/opensearch/http/HttpInfo.java
+++ b/server/src/main/java/org/opensearch/http/HttpInfo.java
@@ -36,8 +36,8 @@
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.network.InetAddresses;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.node.ReportingService;
diff --git a/server/src/main/java/org/opensearch/http/HttpServerTransport.java b/server/src/main/java/org/opensearch/http/HttpServerTransport.java
index 6549f0786fcda..7bd16a286e33b 100644
--- a/server/src/main/java/org/opensearch/http/HttpServerTransport.java
+++ b/server/src/main/java/org/opensearch/http/HttpServerTransport.java
@@ -33,7 +33,7 @@
 package org.opensearch.http;
 import org.opensearch.common.lifecycle.LifecycleComponent;
-import org.opensearch.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
 import org.opensearch.common.util.concurrent.ThreadContext;
 import org.opensearch.node.ReportingService;
 import org.opensearch.rest.RestChannel;
diff --git a/server/src/main/java/org/opensearch/index/IndicesModuleResponse.java b/server/src/main/java/org/opensearch/index/IndicesModuleResponse.java
index 67f2c686dbf8b..3d2340ed35a8c 100644
--- a/server/src/main/java/org/opensearch/index/IndicesModuleResponse.java
+++ b/server/src/main/java/org/opensearch/index/IndicesModuleResponse.java
@@ -10,7 +10,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.Objects;
diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java
index 386b2e0e8192d..569977afd3ac5 100644
--- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java
+++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java
@@ -74,7 +74,7 @@
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportResponseHandler;
 import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveryResponse.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveryResponse.java
index 8af69b1786e38..c1203a9f4939f 100644
--- a/server/src/main/java/org/opensearch/indices/recovery/RecoveryResponse.java
+++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveryResponse.java
@@ -34,7 +34,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTranslogOperationsResponse.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTranslogOperationsResponse.java
index b623d382b415f..9ea9f1d48a494 100644
--- a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTranslogOperationsResponse.java
+++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTranslogOperationsResponse.java
@@ -34,7 +34,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RemoteRecoveryTargetHandler.java
index cdc62350b4aa5..35585def29365 100644
--- a/server/src/main/java/org/opensearch/indices/recovery/RemoteRecoveryTargetHandler.java
+++ b/server/src/main/java/org/opensearch/indices/recovery/RemoteRecoveryTargetHandler.java
@@ -48,7 +48,7 @@
 import org.opensearch.indices.replication.SegmentReplicationTargetService;
 import org.opensearch.transport.EmptyTransportResponseHandler;
 import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java b/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java
index 4f1fb42a421f6..7de2d0e2fb3d4 100644
--- a/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java
+++ b/server/src/main/java/org/opensearch/indices/recovery/RetryableTransportClient.java
@@ -26,7 +26,7 @@
 import org.opensearch.transport.SendRequestTransportException;
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.util.Map;
diff --git a/server/src/main/java/org/opensearch/indices/replication/CheckpointInfoResponse.java b/server/src/main/java/org/opensearch/indices/replication/CheckpointInfoResponse.java
index 0155883f34552..72ee7203e5e98 100644
--- a/server/src/main/java/org/opensearch/indices/replication/CheckpointInfoResponse.java
+++ b/server/src/main/java/org/opensearch/indices/replication/CheckpointInfoResponse.java
@@ -12,7 +12,7 @@
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.index.store.StoreFileMetadata;
 import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.Map;
diff --git a/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java b/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java
index 33a84833f2418..7409e09310737 100644
--- a/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java
+++ b/server/src/main/java/org/opensearch/indices/replication/GetSegmentFilesResponse.java
@@ -11,7 +11,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.index.store.StoreFileMetadata;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/indices/replication/RemoteSegmentFileChunkWriter.java b/server/src/main/java/org/opensearch/indices/replication/RemoteSegmentFileChunkWriter.java
index ed171927c4600..d96f9544e5734 100644
--- a/server/src/main/java/org/opensearch/indices/replication/RemoteSegmentFileChunkWriter.java
+++ b/server/src/main/java/org/opensearch/indices/replication/RemoteSegmentFileChunkWriter.java
@@ -20,7 +20,7 @@
 import org.opensearch.indices.recovery.RetryableTransportClient;
 import org.opensearch.indices.recovery.FileChunkWriter;
 import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java
index 8bb2a61e32e2d..090e3f96a8f65 100644
--- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java
+++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationSourceService.java
@@ -34,7 +34,7 @@
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.TransportChannel;
 import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java
index 7c35c4f07598e..b41c9e09add45 100644
--- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java
+++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java
@@ -41,7 +41,7 @@
 import org.opensearch.transport.TransportChannel;
 import org.opensearch.transport.TransportRequestHandler;
 import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.util.Map;
diff --git a/server/src/main/java/org/opensearch/indices/replication/common/ReplicationTarget.java b/server/src/main/java/org/opensearch/indices/replication/common/ReplicationTarget.java
index 0c96a87715014..0e06791768745 100644
--- a/server/src/main/java/org/opensearch/indices/replication/common/ReplicationTarget.java
+++ b/server/src/main/java/org/opensearch/indices/replication/common/ReplicationTarget.java
@@ -28,7 +28,7 @@
 import org.opensearch.indices.recovery.FileChunkRequest;
 import org.opensearch.indices.recovery.RecoveryTransportRequest;
 import org.opensearch.transport.TransportChannel;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicBoolean;
diff --git a/server/src/main/java/org/opensearch/indices/store/IndicesStore.java b/server/src/main/java/org/opensearch/indices/store/IndicesStore.java
index eaaf5198fba94..95ecc1359e5d1 100644
--- a/server/src/main/java/org/opensearch/indices/store/IndicesStore.java
+++ b/server/src/main/java/org/opensearch/indices/store/IndicesStore.java
@@ -69,7 +69,7 @@
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportResponseHandler;
 import org.opensearch.transport.TransportService;
diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java
index d8b51b0184a66..60c95a04a042f 100644
--- a/server/src/main/java/org/opensearch/node/Node.java
+++ b/server/src/main/java/org/opensearch/node/Node.java
@@ -133,8 +133,8 @@
 import org.opensearch.common.settings.SettingUpgrader;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.SettingsModule;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.BigArrays;
 import org.opensearch.common.util.PageCacheRecycler;
diff --git a/server/src/main/java/org/opensearch/node/NodeValidationException.java b/server/src/main/java/org/opensearch/node/NodeValidationException.java
index ef1500f1e4ede..b316288b7cf06 100644
--- a/server/src/main/java/org/opensearch/node/NodeValidationException.java
+++ b/server/src/main/java/org/opensearch/node/NodeValidationException.java
@@ -32,7 +32,7 @@
 package org.opensearch.node;
-import org.opensearch.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
 import java.util.List;
diff --git a/server/src/main/java/org/opensearch/plugins/DiscoveryPlugin.java b/server/src/main/java/org/opensearch/plugins/DiscoveryPlugin.java
index 89433b2a3b67d..bca72942bd70e 100644
--- a/server/src/main/java/org/opensearch/plugins/DiscoveryPlugin.java
+++ b/server/src/main/java/org/opensearch/plugins/DiscoveryPlugin.java
@@ -37,6 +37,7 @@
 import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.Settings;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.discovery.SeedHostsProvider;
 import org.opensearch.transport.TransportService;
@@ -86,7 +87,7 @@ default NetworkService.CustomNameResolver getCustomNameResolver(Settings setting
 * (see {@link org.opensearch.discovery.DiscoveryModule#DISCOVERY_SEED_PROVIDERS_SETTING}), and
 * the value is a supplier to construct the host provider when it is selected for use.
 *
- * @param transportService Use to form the {@link org.opensearch.common.transport.TransportAddress} portion
+ * @param transportService Use to form the {@link TransportAddress} portion
 * of a {@link org.opensearch.cluster.node.DiscoveryNode}
 * @param networkService Use to find the publish host address of the current node
 */
diff --git a/server/src/main/java/org/opensearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/opensearch/repositories/VerifyNodeRepositoryAction.java
index ff5ffdbfe1e3e..1f0a5060962c6 100644
--- a/server/src/main/java/org/opensearch/repositories/VerifyNodeRepositoryAction.java
+++ b/server/src/main/java/org/opensearch/repositories/VerifyNodeRepositoryAction.java
@@ -48,7 +48,7 @@
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
index b54c8955283a2..e29898624386c 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
@@ -48,7 +48,7 @@
 import org.opensearch.common.Table;
 import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.network.NetworkAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.http.HttpInfo;
 import org.opensearch.index.cache.query.QueryCacheStats;
diff --git a/server/src/main/java/org/opensearch/search/SearchPhaseResult.java b/server/src/main/java/org/opensearch/search/SearchPhaseResult.java
index 1b4cebbe91a3e..9aace832cf72f 100644
--- a/server/src/main/java/org/opensearch/search/SearchPhaseResult.java
+++ b/server/src/main/java/org/opensearch/search/SearchPhaseResult.java
@@ -39,7 +39,7 @@
 import org.opensearch.search.internal.ShardSearchContextId;
 import org.opensearch.search.internal.ShardSearchRequest;
 import org.opensearch.search.query.QuerySearchResult;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/tasks/TaskCancellationService.java b/server/src/main/java/org/opensearch/tasks/TaskCancellationService.java
index 7ff27fa1096dc..a24cdd02bbd30 100644
--- a/server/src/main/java/org/opensearch/tasks/TaskCancellationService.java
+++ b/server/src/main/java/org/opensearch/tasks/TaskCancellationService.java
@@ -49,7 +49,7 @@
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportRequestHandler;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/ActionTransportException.java b/server/src/main/java/org/opensearch/transport/ActionTransportException.java
index 97e9a986db7f4..fb5dd2c75dc75 100644
--- a/server/src/main/java/org/opensearch/transport/ActionTransportException.java
+++ b/server/src/main/java/org/opensearch/transport/ActionTransportException.java
@@ -34,7 +34,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/EmptyTransportResponseHandler.java b/server/src/main/java/org/opensearch/transport/EmptyTransportResponseHandler.java
index 3b97a81faf192..1691b427ffca1 100644
--- a/server/src/main/java/org/opensearch/transport/EmptyTransportResponseHandler.java
+++ b/server/src/main/java/org/opensearch/transport/EmptyTransportResponseHandler.java
@@ -33,6 +33,7 @@
 package org.opensearch.transport;
 import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.threadpool.ThreadPool;
 /**
diff --git a/server/src/main/java/org/opensearch/transport/InboundHandler.java b/server/src/main/java/org/opensearch/transport/InboundHandler.java
index bb04f149d39a9..12eaabb0a74eb 100644
--- a/server/src/main/java/org/opensearch/transport/InboundHandler.java
+++ b/server/src/main/java/org/opensearch/transport/InboundHandler.java
@@ -41,10 +41,11 @@
 import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput;
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.core.common.io.stream.StreamInput;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
 import org.opensearch.common.util.concurrent.ThreadContext;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.threadpool.ThreadPool;
 import java.io.EOFException;
diff --git a/server/src/main/java/org/opensearch/transport/OutboundHandler.java b/server/src/main/java/org/opensearch/transport/OutboundHandler.java
index 3e493267242fb..c67a6df8f90f6 100644
--- a/server/src/main/java/org/opensearch/transport/OutboundHandler.java
+++ b/server/src/main/java/org/opensearch/transport/OutboundHandler.java
@@ -44,12 +44,13 @@
 import org.opensearch.common.io.stream.ReleasableBytesStreamOutput;
 import org.opensearch.common.network.CloseableChannel;
 import org.opensearch.common.transport.NetworkExceptionHelper;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.util.BigArrays;
 import org.opensearch.common.util.concurrent.ThreadContext;
 import org.opensearch.common.util.io.IOUtils;
 import org.opensearch.common.lease.Releasable;
 import org.opensearch.common.lease.Releasables;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.threadpool.ThreadPool;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/PlainTransportFuture.java b/server/src/main/java/org/opensearch/transport/PlainTransportFuture.java
index 53d71c135a8dd..19123aba7d413 100644
--- a/server/src/main/java/org/opensearch/transport/PlainTransportFuture.java
+++ b/server/src/main/java/org/opensearch/transport/PlainTransportFuture.java
@@ -36,6 +36,7 @@
 import org.opensearch.OpenSearchTimeoutException;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.common.util.concurrent.BaseFuture;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.concurrent.ExecutionException;
diff --git a/server/src/main/java/org/opensearch/transport/ProxyConnectionStrategy.java b/server/src/main/java/org/opensearch/transport/ProxyConnectionStrategy.java
index b1eefb9fac245..fcaad01e78a50 100644
--- a/server/src/main/java/org/opensearch/transport/ProxyConnectionStrategy.java
+++ b/server/src/main/java/org/opensearch/transport/ProxyConnectionStrategy.java
@@ -43,7 +43,7 @@
 import org.opensearch.core.common.io.stream.Writeable;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.util.concurrent.CountDown;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/transport/RemoteTransportException.java b/server/src/main/java/org/opensearch/transport/RemoteTransportException.java
index 041a70795b8de..de3d6bb9d775e 100644
--- a/server/src/main/java/org/opensearch/transport/RemoteTransportException.java
+++ b/server/src/main/java/org/opensearch/transport/RemoteTransportException.java
@@ -34,7 +34,7 @@
 import org.opensearch.OpenSearchWrapperException;
 import org.opensearch.core.common.io.stream.StreamInput;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/SniffConnectionStrategy.java b/server/src/main/java/org/opensearch/transport/SniffConnectionStrategy.java
index 5e00704b3baaf..c95a65fadf323 100644
--- a/server/src/main/java/org/opensearch/transport/SniffConnectionStrategy.java
+++ b/server/src/main/java/org/opensearch/transport/SniffConnectionStrategy.java
@@ -50,7 +50,7 @@
 import org.opensearch.core.common.io.stream.Writeable;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.util.concurrent.ThreadContext;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.xcontent.XContentBuilder;
diff --git a/server/src/main/java/org/opensearch/transport/TaskTransportChannel.java b/server/src/main/java/org/opensearch/transport/TaskTransportChannel.java
index 4dceee4c48d4d..052611317f174 100644
--- a/server/src/main/java/org/opensearch/transport/TaskTransportChannel.java
+++ b/server/src/main/java/org/opensearch/transport/TaskTransportChannel.java
@@ -34,6 +34,7 @@
 import org.opensearch.Version;
 import org.opensearch.common.lease.Releasable;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/TcpTransport.java b/server/src/main/java/org/opensearch/transport/TcpTransport.java
index 8733cb5fe7e8e..b99e750366018 100644
--- a/server/src/main/java/org/opensearch/transport/TcpTransport.java
+++ b/server/src/main/java/org/opensearch/transport/TcpTransport.java
@@ -54,9 +54,9 @@
 import org.opensearch.common.network.NetworkUtils;
 import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
 import org.opensearch.common.transport.PortsRange;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.BigArrays;
diff --git a/server/src/main/java/org/opensearch/transport/TcpTransportChannel.java b/server/src/main/java/org/opensearch/transport/TcpTransportChannel.java
index e25003648794d..00702d08902a9 100644
--- a/server/src/main/java/org/opensearch/transport/TcpTransportChannel.java
+++ b/server/src/main/java/org/opensearch/transport/TcpTransportChannel.java
@@ -34,6 +34,7 @@
 import org.opensearch.Version;
 import org.opensearch.common.lease.Releasable;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.search.query.QuerySearchResult;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/Transport.java b/server/src/main/java/org/opensearch/transport/Transport.java
index 3bf855f847685..26bae24f70f26 100644
--- a/server/src/main/java/org/opensearch/transport/Transport.java
+++ b/server/src/main/java/org/opensearch/transport/Transport.java
@@ -37,11 +37,12 @@
 import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.common.collect.MapBuilder;
 import org.opensearch.common.lifecycle.LifecycleComponent;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.ConcurrentCollections;
 import org.opensearch.common.util.concurrent.ConcurrentMapLong;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.Closeable;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/TransportActionProxy.java b/server/src/main/java/org/opensearch/transport/TransportActionProxy.java
index 8ad6010800ad8..a61aec8a34e20 100644
--- a/server/src/main/java/org/opensearch/transport/TransportActionProxy.java
+++ b/server/src/main/java/org/opensearch/transport/TransportActionProxy.java
@@ -35,6 +35,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.tasks.Task;
 import org.opensearch.threadpool.ThreadPool;
diff --git a/server/src/main/java/org/opensearch/transport/TransportChannel.java b/server/src/main/java/org/opensearch/transport/TransportChannel.java
index b660db029c0b2..3c582127f28e8 100644
--- a/server/src/main/java/org/opensearch/transport/TransportChannel.java
+++ b/server/src/main/java/org/opensearch/transport/TransportChannel.java
@@ -36,6 +36,7 @@
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.opensearch.Version;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
diff --git a/server/src/main/java/org/opensearch/transport/TransportHandshaker.java b/server/src/main/java/org/opensearch/transport/TransportHandshaker.java
index 7b64b328469ad..8ce4f6887cbd9 100644
--- a/server/src/main/java/org/opensearch/transport/TransportHandshaker.java
+++ b/server/src/main/java/org/opensearch/transport/TransportHandshaker.java
@@ -40,6 +40,7 @@
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.metrics.CounterMetric;
 import org.opensearch.common.unit.TimeValue;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.threadpool.ThreadPool;
 import java.io.EOFException;
diff --git a/server/src/main/java/org/opensearch/transport/TransportInfo.java b/server/src/main/java/org/opensearch/transport/TransportInfo.java
index 0ee8672e8df8c..308abb2bc28f4 100644
--- a/server/src/main/java/org/opensearch/transport/TransportInfo.java
+++ b/server/src/main/java/org/opensearch/transport/TransportInfo.java
@@ -37,8 +37,8 @@
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.network.InetAddresses;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.node.ReportingService;
diff --git a/server/src/main/java/org/opensearch/transport/TransportInterceptor.java b/server/src/main/java/org/opensearch/transport/TransportInterceptor.java
index f4b003cae4864..9ee2db6d39893 100644
--- a/server/src/main/java/org/opensearch/transport/TransportInterceptor.java
+++ b/server/src/main/java/org/opensearch/transport/TransportInterceptor.java
@@ -34,6 +34,7 @@
 import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.core.common.io.stream.Writeable.Reader;
+import org.opensearch.core.transport.TransportResponse;
 /**
 * This interface allows plugins to intercept requests on both the sender and the receiver side.
diff --git a/server/src/main/java/org/opensearch/transport/TransportMessageListener.java b/server/src/main/java/org/opensearch/transport/TransportMessageListener.java
index 8a7612d3bd99a..dfcd7acce3706 100644
--- a/server/src/main/java/org/opensearch/transport/TransportMessageListener.java
+++ b/server/src/main/java/org/opensearch/transport/TransportMessageListener.java
@@ -32,6 +32,7 @@
 package org.opensearch.transport;
 import org.opensearch.cluster.node.DiscoveryNode;
+import org.opensearch.core.transport.TransportResponse;
 /**
 * Listens for transport messages
diff --git a/server/src/main/java/org/opensearch/transport/TransportRequest.java b/server/src/main/java/org/opensearch/transport/TransportRequest.java
index 95b038303f530..3eb465718763e 100644
--- a/server/src/main/java/org/opensearch/transport/TransportRequest.java
+++ b/server/src/main/java/org/opensearch/transport/TransportRequest.java
@@ -34,6 +34,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.transport.TransportMessage;
 import org.opensearch.tasks.TaskAwareRequest;
 import org.opensearch.tasks.TaskId;
diff --git a/server/src/main/java/org/opensearch/transport/TransportResponseHandler.java b/server/src/main/java/org/opensearch/transport/TransportResponseHandler.java
index 674a62fb75b7d..0b39983cc3bee 100644
--- a/server/src/main/java/org/opensearch/transport/TransportResponseHandler.java
+++ b/server/src/main/java/org/opensearch/transport/TransportResponseHandler.java
@@ -34,6 +34,7 @@
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.function.Function;
diff --git a/server/src/main/java/org/opensearch/transport/TransportService.java b/server/src/main/java/org/opensearch/transport/TransportService.java
index 3b59c99c03d3b..61900fa1f0014 100644
--- a/server/src/main/java/org/opensearch/transport/TransportService.java
+++ b/server/src/main/java/org/opensearch/transport/TransportService.java
@@ -52,8 +52,8 @@
 import org.opensearch.common.regex.Regex;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.AbstractRunnable;
 import org.opensearch.common.util.concurrent.ThreadContext;
@@ -61,6 +61,7 @@
 import org.opensearch.common.lease.Releasable;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.node.NodeClosedException;
 import org.opensearch.node.ReportingService;
 import org.opensearch.tasks.Task;
diff --git a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java
index 629fe9ebd4b99..920f3026871a0 100644
--- a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java
+++ b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java
@@ -72,7 +72,7 @@
 import org.opensearch.core.common.io.stream.NotSerializableExceptionWrapper;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.common.util.CancellableThreadsTests;
 import org.opensearch.common.util.set.Sets;
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
index e0676cc9ddbdd..1b8d3a2c276fb 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
@@ -53,7 +53,7 @@
 import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
 import org.opensearch.common.util.PageCacheRecycler;
 import org.opensearch.common.lease.Releasable;
 import org.opensearch.core.indices.breaker.NoneCircuitBreakerService;
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
index b46d4fcfea2c9..6b4317d6ec934 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
@@ -73,7 +73,7 @@
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportService;
 import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportResponseHandler;
 import java.io.IOException;
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java
index 5571eb020b9e0..a2998b4bf6be7 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java
@@ -45,7 +45,7 @@
 import org.opensearch.cluster.routing.allocation.decider.Decision;
 import org.opensearch.common.Strings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.json.JsonXContent;
diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java
index a09a580de1475..ee4f22c6af93b 100644
--- a/server/src/test/java/org/opensearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java
@@ -42,7 +42,7 @@
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.Strings;
 import org.opensearch.index.query.RandomQueryBuilder;
 import org.opensearch.core.index.shard.ShardId;
diff --git a/server/src/test/java/org/opensearch/action/admin/indices/close/TransportVerifyShardBeforeCloseActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/close/TransportVerifyShardBeforeCloseActionTests.java
index 55e4efd8b10df..86e87de364a51 100644
--- a/server/src/test/java/org/opensearch/action/admin/indices/close/TransportVerifyShardBeforeCloseActionTests.java
+++ b/server/src/test/java/org/opensearch/action/admin/indices/close/TransportVerifyShardBeforeCloseActionTests.java
@@ -70,7 +70,7 @@
 import org.opensearch.test.transport.CapturingTransport;
 import org.opensearch.threadpool.TestThreadPool;
 import org.opensearch.threadpool.ThreadPool;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.util.Collections;
diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java
index c0c35e8c22f4d..ebdc7dcbfd6ac 100644
--- a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java
+++ b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java
@@ -92,7 +92,7 @@
 import org.opensearch.threadpool.ThreadPool.Names;
 import org.opensearch.transport.TestTransportChannel;
 import org.opensearch.transport.TransportChannel;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.io.IOException;
diff --git a/server/src/test/java/org/opensearch/action/search/ClearScrollControllerTests.java b/server/src/test/java/org/opensearch/action/search/ClearScrollControllerTests.java
index e3c7d4741d3ae..55e597dab04dc 100644
--- a/server/src/test/java/org/opensearch/action/search/ClearScrollControllerTests.java
+++ b/server/src/test/java/org/opensearch/action/search/ClearScrollControllerTests.java
@@ -46,7 +46,7 @@
 import org.opensearch.test.VersionUtils;
 import org.opensearch.transport.NodeNotConnectedException;
 import org.opensearch.transport.Transport;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.io.IOException;
 import java.util.Arrays;
diff --git a/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java
index 413fff98de44c..05df501faae82 100644
--- a/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java
+++ b/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java
@@ -54,7 +54,7 @@
 import org.opensearch.common.SetOnce;
 import org.opensearch.common.collect.Tuple;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.index.Index;
 import org.opensearch.index.query.InnerHitBuilder;
diff --git a/server/src/test/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
index 23654c02f0901..5c328c279fabb 100644
--- a/server/src/test/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
+++ b/server/src/test/java/org/opensearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java
@@ -73,7 +73,7 @@
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.ReceiveTimeoutTransportException;
 import org.opensearch.transport.TestTransportChannel;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import org.junit.After;
 import org.junit.AfterClass;
diff --git a/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java b/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java
index 21d14fb31059d..10d5a87e8200a 100644
--- a/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java
+++ b/server/src/test/java/org/opensearch/action/support/replication/ReplicationOperationTests.java
@@ -51,7 +51,7 @@
 import org.opensearch.cluster.routing.ShardRoutingState;
 import org.opensearch.core.common.breaker.CircuitBreaker;
 import org.opensearch.core.common.breaker.CircuitBreakingException;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.ConcurrentCollections;
 import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
diff --git a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java
index 5af358cae18a8..7d47238b8e5ab 100644
--- a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java
+++ b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationActionTests.java
@@ -102,7 +102,7 @@
 import org.opensearch.transport.TransportChannel;
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import org.opensearch.transport.nio.MockNioTransport;
diff --git a/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionForIndexingPressureTests.java b/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionForIndexingPressureTests.java
index c978031103ff2..4a7500f16197d 100644
--- a/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionForIndexingPressureTests.java
+++ b/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionForIndexingPressureTests.java
@@ -41,7 +41,7 @@
 import org.opensearch.threadpool.TestThreadPool;
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.TransportChannel;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import org.hamcrest.Matcher;
 import org.junit.After;
diff --git a/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionTests.java
index 7a0d0f3814100..a2d5e92fa11fe 100644
--- a/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionTests.java
+++ b/server/src/test/java/org/opensearch/action/support/replication/TransportWriteActionTests.java
@@ -72,7 +72,7 @@
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.Transport;
 import org.opensearch.transport.TransportException;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import org.junit.After;
 import org.junit.AfterClass;
diff --git a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java
index 15aacd25b30b1..69102d2e76bef 100644
--- a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java
+++ b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java
@@ -40,8 +40,8 @@
 import org.opensearch.common.CheckedConsumer;
 import org.opensearch.common.io.PathUtils;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.discovery.DiscoveryModule;
 import org.opensearch.discovery.SettingsBasedSeedHostsProvider;
 import org.opensearch.env.Environment;
diff --git a/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java b/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java
index 23c2506bf6143..20b68f32a69e4 100644
--- a/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java
+++ b/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java
@@ -51,7 +51,7 @@
 import org.opensearch.cluster.routing.TestShardRouting;
 import org.opensearch.common.Strings;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.json.JsonXContent;
diff --git a/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java b/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java
index a284269dc4151..25956f1a560f2 100644
--- a/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java
+++ b/server/src/test/java/org/opensearch/cluster/NodeConnectionsServiceTests.java
@@ -48,8 +48,8 @@
 import org.opensearch.common.lifecycle.Lifecycle;
 import org.opensearch.common.lifecycle.LifecycleListener;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.BoundTransportAddress;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.BoundTransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.test.OpenSearchTestCase;
 import org.opensearch.test.MockLogAppender;
 import org.opensearch.test.junit.annotations.TestLogging;
diff --git a/server/src/test/java/org/opensearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/opensearch/cluster/action/shard/ShardStateActionTests.java
index 2688aaa145dc0..957da6ea65b89 100644
--- a/server/src/test/java/org/opensearch/cluster/action/shard/ShardStateActionTests.java
+++ b/server/src/test/java/org/opensearch/cluster/action/shard/ShardStateActionTests.java
@@ -64,7 +64,7 @@
 import org.opensearch.transport.NodeNotConnectedException;
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import org.junit.After;
 import org.junit.AfterClass;
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java b/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java
index b091130db0b98..efdf4fb4b92f5 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/ClusterFormationFailureHelperTests.java
@@ -41,7 +41,7 @@
 import org.opensearch.cluster.node.DiscoveryNodeRole;
 import org.opensearch.cluster.node.DiscoveryNodes;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.gateway.GatewayMetaState;
 import org.opensearch.monitor.StatusInfo;
 import org.opensearch.test.OpenSearchTestCase;
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/CoordinationStateTests.java b/server/src/test/java/org/opensearch/cluster/coordination/CoordinationStateTests.java
index 23087e6dd2ba4..bae7581f8d9e0 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/CoordinationStateTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/CoordinationStateTests.java
@@ -43,7 +43,7 @@
 import org.opensearch.cluster.node.DiscoveryNodes;
 import org.opensearch.common.UUIDs;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.transport.TransportAddress;
+import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.test.OpenSearchTestCase;
 import org.opensearch.test.EqualsHashCodeTestUtils;
 import org.junit.Before;
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/FollowersCheckerTests.java b/server/src/test/java/org/opensearch/cluster/coordination/FollowersCheckerTests.java
index be211a16cdd72..8e6d7346677c3 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/FollowersCheckerTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/FollowersCheckerTests.java
@@ -53,8 +53,8 @@
 import org.opensearch.transport.ConnectTransportException;
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
-import org.opensearch.transport.TransportResponse;
-import org.opensearch.transport.TransportResponse.Empty;
+import org.opensearch.core.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse.Empty;
 import org.opensearch.transport.TransportResponseHandler;
 import org.opensearch.transport.TransportService;
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/JoinHelperTests.java b/server/src/test/java/org/opensearch/cluster/coordination/JoinHelperTests.java
index 27146829ad8da..f63f33f345411 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/JoinHelperTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/JoinHelperTests.java
@@ -53,7 +53,7 @@
 import org.opensearch.transport.RemoteTransportException;
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import java.io.IOException;
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/LeaderCheckerTests.java b/server/src/test/java/org/opensearch/cluster/coordination/LeaderCheckerTests.java
index ac6d885229f9e..2ebeb9b4edeb8 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/LeaderCheckerTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/LeaderCheckerTests.java
@@ -50,8 +50,8 @@
 import org.opensearch.transport.ConnectTransportException;
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportRequest;
-import org.opensearch.transport.TransportResponse;
-import org.opensearch.transport.TransportResponse.Empty;
+import org.opensearch.core.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse.Empty;
 import org.opensearch.transport.TransportResponseHandler;
 import org.opensearch.transport.TransportService;
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/NodeJoinTests.java b/server/src/test/java/org/opensearch/cluster/coordination/NodeJoinTests.java
index fb2e7cd73d3bf..09a7b34958f2c 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/NodeJoinTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/NodeJoinTests.java
@@ -68,7 +68,7 @@
 import org.opensearch.transport.Transport;
 import org.opensearch.transport.TransportRequest;
 import org.opensearch.transport.TransportRequestOptions;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import org.opensearch.transport.TransportService;
 import org.junit.After;
 import org.junit.AfterClass;
diff --git a/server/src/test/java/org/opensearch/cluster/coordination/PublicationTests.java b/server/src/test/java/org/opensearch/cluster/coordination/PublicationTests.java
index 517456f54b785..366f19f74aac8 100644
--- a/server/src/test/java/org/opensearch/cluster/coordination/PublicationTests.java
+++ b/server/src/test/java/org/opensearch/cluster/coordination/PublicationTests.java
@@ -47,7 +47,7 @@
 import org.opensearch.discovery.Discovery;
 import org.opensearch.test.OpenSearchTestCase;
 import org.opensearch.transport.TransportException;
-import org.opensearch.transport.TransportResponse;
+import org.opensearch.core.transport.TransportResponse;
 import java.util.ArrayList;
 import java.util.Arrays;
diff --git a/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeFiltersTests.java b/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeFiltersTests.java
index 54f9c46e999d7..691a3cb418f94 100644
--- a/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeFiltersTests.java
+++ b/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeFiltersTests.java
@@ -35,7 +35,7 @@
 import org.opensearch.Version;
 import org.opensearch.common.settings.Setting;
 import
org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; diff --git a/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeTests.java b/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeTests.java index 14f6880419286..b4f03b1d7a850 100644 --- a/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeTests.java +++ b/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodeTests.java @@ -37,7 +37,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.NodeRoles; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodesTests.java b/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodesTests.java index 47676ecc13f5e..f5b7d98bb95ce 100644 --- a/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodesTests.java +++ b/server/src/test/java/org/opensearch/cluster/node/DiscoveryNodesTests.java @@ -36,7 +36,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.common.settings.Setting; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import java.util.ArrayList; diff --git a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodeTests.java b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodeTests.java index 578d537653684..cc4f2e510cb31 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodeTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodeTests.java @@ -35,7 +35,7 @@ import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.index.Index; import org.opensearch.core.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java b/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java index f795df2f48b22..5b47d9babd264 100644 --- a/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/opensearch/common/network/NetworkModuleTests.java @@ -36,7 +36,7 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; diff --git a/server/src/test/java/org/opensearch/common/transport/BoundTransportAddressTests.java b/server/src/test/java/org/opensearch/common/transport/BoundTransportAddressTests.java index a177840059f8f..dae16d805efbf 100644 --- 
a/server/src/test/java/org/opensearch/common/transport/BoundTransportAddressTests.java +++ b/server/src/test/java/org/opensearch/common/transport/BoundTransportAddressTests.java @@ -34,6 +34,8 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import java.net.InetAddress; diff --git a/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java b/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java index acf94483c8116..0f8c40a0bd759 100644 --- a/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java +++ b/server/src/test/java/org/opensearch/discovery/FileBasedSeedHostsProviderTests.java @@ -36,8 +36,8 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.util.PageCacheRecycler; diff --git a/server/src/test/java/org/opensearch/discovery/HandshakingTransportAddressConnectorTests.java b/server/src/test/java/org/opensearch/discovery/HandshakingTransportAddressConnectorTests.java index 5d252168c7b28..e4703626d08fc 100644 --- a/server/src/test/java/org/opensearch/discovery/HandshakingTransportAddressConnectorTests.java +++ b/server/src/test/java/org/opensearch/discovery/HandshakingTransportAddressConnectorTests.java @@ -43,7 +43,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.SetOnce; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.MockLogAppender; import org.opensearch.test.junit.annotations.TestLogging; diff --git a/server/src/test/java/org/opensearch/discovery/InitializeExtensionRequestTests.java b/server/src/test/java/org/opensearch/discovery/InitializeExtensionRequestTests.java index 63b79c9b53081..0be812ce847f8 100644 --- a/server/src/test/java/org/opensearch/discovery/InitializeExtensionRequestTests.java +++ b/server/src/test/java/org/opensearch/discovery/InitializeExtensionRequestTests.java @@ -13,7 +13,7 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.BytesStreamInput; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.extensions.DiscoveryExtensionNode; import org.opensearch.extensions.ExtensionDependency; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/discovery/PeerFinderTests.java b/server/src/test/java/org/opensearch/discovery/PeerFinderTests.java index 91eec3d2edfaf..c3d27119b61be 100644 --- a/server/src/test/java/org/opensearch/discovery/PeerFinderTests.java +++ 
b/server/src/test/java/org/opensearch/discovery/PeerFinderTests.java @@ -42,7 +42,7 @@ import org.opensearch.cluster.node.DiscoveryNodes.Builder; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.discovery.PeerFinder.TransportAddressConnector; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.transport.CapturingTransport; diff --git a/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java b/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java index 07491cc0d6435..724d3fbee3939 100644 --- a/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java +++ b/server/src/test/java/org/opensearch/discovery/SeedHostsResolverTests.java @@ -39,8 +39,8 @@ import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.util.PageCacheRecycler; diff --git a/server/src/test/java/org/opensearch/discovery/SettingsBasedSeedHostsProviderTests.java b/server/src/test/java/org/opensearch/discovery/SettingsBasedSeedHostsProviderTests.java index 4648c98bec5df..5801ad0d08818 100644 --- a/server/src/test/java/org/opensearch/discovery/SettingsBasedSeedHostsProviderTests.java +++ b/server/src/test/java/org/opensearch/discovery/SettingsBasedSeedHostsProviderTests.java @@ -32,7 +32,7 @@ package org.opensearch.discovery; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.set.Sets; import org.opensearch.discovery.SeedHostsProvider.HostsResolver; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/extensions/DiscoveryExtensionNodeTests.java b/server/src/test/java/org/opensearch/extensions/DiscoveryExtensionNodeTests.java index 578e7503b76a9..8146062c15b73 100644 --- a/server/src/test/java/org/opensearch/extensions/DiscoveryExtensionNodeTests.java +++ b/server/src/test/java/org/opensearch/extensions/DiscoveryExtensionNodeTests.java @@ -10,7 +10,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.Version; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import java.net.InetAddress; diff --git a/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java b/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java index 9f1050351c7b5..840605b61c500 100644 --- a/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java +++ b/server/src/test/java/org/opensearch/extensions/ExtensionsManagerTests.java @@ -55,7 +55,7 @@ import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.WriteableSetting.SettingType; import org.opensearch.common.settings.SettingsModule; -import 
org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.BytesStreamInput; @@ -77,7 +77,7 @@ import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.Transport; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportService; import org.opensearch.transport.nio.MockNioTransport; import org.opensearch.usage.UsageService; diff --git a/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java b/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java index bc216bd3bbbb8..fb5a6e374b9a3 100644 --- a/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java +++ b/server/src/test/java/org/opensearch/extensions/action/ExtensionTransportActionsHandlerTests.java @@ -20,7 +20,7 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.extensions.DiscoveryExtensionNode; import org.opensearch.extensions.AcknowledgedResponse; diff --git a/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java b/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java index b59513fc0045d..f0cdbd6c96d41 100644 --- a/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java +++ b/server/src/test/java/org/opensearch/extensions/rest/RestSendToExtensionActionTests.java @@ -38,7 +38,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsModule; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.extensions.DiscoveryExtensionNode; diff --git a/server/src/test/java/org/opensearch/gateway/GatewayServiceTests.java b/server/src/test/java/org/opensearch/gateway/GatewayServiceTests.java index 2d8a26f8bbe87..b8607c0aa89ab 100644 --- a/server/src/test/java/org/opensearch/gateway/GatewayServiceTests.java +++ b/server/src/test/java/org/opensearch/gateway/GatewayServiceTests.java @@ -50,7 +50,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.snapshots.EmptySnapshotsInfoService; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java index fbf0e8cd42c72..5ea003b323fd6 100644 --- 
a/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/opensearch/http/AbstractHttpServerTransportTests.java @@ -41,7 +41,7 @@ import org.opensearch.common.network.NetworkUtils; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; diff --git a/server/src/test/java/org/opensearch/http/HttpInfoTests.java b/server/src/test/java/org/opensearch/http/HttpInfoTests.java index d03ae9a2a1ccb..030e7cc89ab76 100644 --- a/server/src/test/java/org/opensearch/http/HttpInfoTests.java +++ b/server/src/test/java/org/opensearch/http/HttpInfoTests.java @@ -36,8 +36,8 @@ import java.net.InetAddress; import java.util.Map; import org.opensearch.common.network.NetworkAddress; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceServiceTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceServiceTests.java index bee1bedc892d8..31ad67f35cdd2 100644 --- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationSourceServiceTests.java @@ -34,7 +34,7 @@ import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportException; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java index 94e57f4a0d3e4..efb8dda201e87 100644 --- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java @@ -41,7 +41,7 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.EmptyTransportResponseHandler; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportService; import org.opensearch.test.transport.CapturingTransport; import java.io.IOException; diff --git a/server/src/test/java/org/opensearch/node/NodeTests.java b/server/src/test/java/org/opensearch/node/NodeTests.java index 6dfecd8a692f1..816dc47c96f8a 100644 --- a/server/src/test/java/org/opensearch/node/NodeTests.java +++ b/server/src/test/java/org/opensearch/node/NodeTests.java @@ -41,7 +41,7 @@ import 
org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsException; -import org.opensearch.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.env.Environment; diff --git a/server/src/test/java/org/opensearch/node/ResponseCollectorServiceTests.java b/server/src/test/java/org/opensearch/node/ResponseCollectorServiceTests.java index 2b13df3027cfa..7ca1f1e864b99 100644 --- a/server/src/test/java/org/opensearch/node/ResponseCollectorServiceTests.java +++ b/server/src/test/java/org/opensearch/node/ResponseCollectorServiceTests.java @@ -40,7 +40,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; diff --git a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java index 347fef773fc8a..31df1b3ab80b3 100644 --- a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java @@ -40,8 +40,8 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/rest/BytesRestResponseTests.java b/server/src/test/java/org/opensearch/rest/BytesRestResponseTests.java index 5bdc2cc0bd280..9e851cef62655 100644 --- a/server/src/test/java/org/opensearch/rest/BytesRestResponseTests.java +++ b/server/src/test/java/org/opensearch/rest/BytesRestResponseTests.java @@ -42,7 +42,7 @@ import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.core.common.ParsingException; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; diff --git a/server/src/test/java/org/opensearch/rest/RestControllerTests.java b/server/src/test/java/org/opensearch/rest/RestControllerTests.java index ea0ce54913a8d..b4fa7574f0ff0 100644 --- a/server/src/test/java/org/opensearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/opensearch/rest/RestControllerTests.java @@ -39,8 +39,8 @@ import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import 
org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.core.rest.RestStatus; diff --git a/server/src/test/java/org/opensearch/rest/action/RestBuilderListenerTests.java b/server/src/test/java/org/opensearch/rest/action/RestBuilderListenerTests.java index d865607aa5451..662ba202c8c5c 100644 --- a/server/src/test/java/org/opensearch/rest/action/RestBuilderListenerTests.java +++ b/server/src/test/java/org/opensearch/rest/action/RestBuilderListenerTests.java @@ -40,8 +40,8 @@ import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.rest.FakeRestChannel; import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.transport.TransportResponse; -import org.opensearch.transport.TransportResponse.Empty; +import org.opensearch.core.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse.Empty; import java.util.concurrent.atomic.AtomicReference; diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 32e0edf7c8f11..92781cf160afc 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -158,7 +158,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.AbstractRunnable; diff --git a/server/src/test/java/org/opensearch/transport/ClusterConnectionManagerTests.java b/server/src/test/java/org/opensearch/transport/ClusterConnectionManagerTests.java index bf47fc2cc9b45..e3d3e17c41fcb 100644 --- a/server/src/test/java/org/opensearch/transport/ClusterConnectionManagerTests.java +++ b/server/src/test/java/org/opensearch/transport/ClusterConnectionManagerTests.java @@ -37,7 +37,7 @@ import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; diff --git a/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java b/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java index 4d8955650f8be..009dc829e8168 100644 --- a/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java +++ b/server/src/test/java/org/opensearch/transport/InboundDecoderTests.java @@ -39,6 +39,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.transport.TransportMessage; import org.opensearch.test.OpenSearchTestCase; import 
org.opensearch.test.VersionUtils; diff --git a/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java b/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java index fe8be400b03c9..ad82fd6ada5f4 100644 --- a/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java +++ b/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java @@ -43,12 +43,13 @@ import org.opensearch.common.bytes.ReleasableBytesReference; import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.PageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.io.Streams; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; diff --git a/server/src/test/java/org/opensearch/transport/ProxyConnectionStrategyTests.java b/server/src/test/java/org/opensearch/transport/ProxyConnectionStrategyTests.java index 1451e9466778b..510a2b3abd943 100644 --- a/server/src/test/java/org/opensearch/transport/ProxyConnectionStrategyTests.java +++ b/server/src/test/java/org/opensearch/transport/ProxyConnectionStrategyTests.java @@ -41,7 +41,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.transport.MockTransportService; import org.opensearch.threadpool.TestThreadPool; diff --git a/server/src/test/java/org/opensearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/opensearch/transport/RemoteClusterConnectionTests.java index 4106ebd9988c9..92f5f455f7caa 100644 --- a/server/src/test/java/org/opensearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/opensearch/transport/RemoteClusterConnectionTests.java @@ -56,7 +56,7 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/server/src/test/java/org/opensearch/transport/RemoteConnectionManagerTests.java b/server/src/test/java/org/opensearch/transport/RemoteConnectionManagerTests.java index ccba66ff4c45e..5741024850756 100644 --- a/server/src/test/java/org/opensearch/transport/RemoteConnectionManagerTests.java +++ b/server/src/test/java/org/opensearch/transport/RemoteConnectionManagerTests.java @@ -36,7 +36,7 @@ import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.OpenSearchTestCase; import 
java.net.InetAddress; diff --git a/server/src/test/java/org/opensearch/transport/SniffConnectionStrategyTests.java b/server/src/test/java/org/opensearch/transport/SniffConnectionStrategyTests.java index 975da2de82ae6..a91b114b5ee84 100644 --- a/server/src/test/java/org/opensearch/transport/SniffConnectionStrategyTests.java +++ b/server/src/test/java/org/opensearch/transport/SniffConnectionStrategyTests.java @@ -47,7 +47,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.Strings; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; diff --git a/server/src/test/java/org/opensearch/transport/TcpTransportTests.java b/server/src/test/java/org/opensearch/transport/TcpTransportTests.java index 39bc2d9bd2d48..ba216b31a1f1d 100644 --- a/server/src/test/java/org/opensearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/opensearch/transport/TcpTransportTests.java @@ -43,7 +43,7 @@ import org.opensearch.common.network.NetworkService; import org.opensearch.common.network.NetworkUtils; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; diff --git a/server/src/test/java/org/opensearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/opensearch/transport/TransportActionProxyTests.java index d1d46d5be13c0..cf37ca8cbdc25 100644 --- a/server/src/test/java/org/opensearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/opensearch/transport/TransportActionProxyTests.java @@ -39,6 +39,7 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.transport.MockTransportService; import org.opensearch.threadpool.TestThreadPool; diff --git a/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java index 58c8806380436..6bbe926ea0384 100644 --- a/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java @@ -37,6 +37,7 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.tasks.TaskId; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; diff --git a/server/src/test/java/org/opensearch/transport/TransportInfoTests.java b/server/src/test/java/org/opensearch/transport/TransportInfoTests.java index 402c4183fa2b8..8360346ce3c42 100644 --- a/server/src/test/java/org/opensearch/transport/TransportInfoTests.java +++ b/server/src/test/java/org/opensearch/transport/TransportInfoTests.java @@ -33,8 +33,8 @@ package org.opensearch.transport; import 
org.opensearch.common.network.NetworkAddress; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/server/src/test/java/org/opensearch/transport/TransportServiceDeserializationFailureTests.java b/server/src/test/java/org/opensearch/transport/TransportServiceDeserializationFailureTests.java index 01d6a4d331477..c23c213353888 100644 --- a/server/src/test/java/org/opensearch/transport/TransportServiceDeserializationFailureTests.java +++ b/server/src/test/java/org/opensearch/transport/TransportServiceDeserializationFailureTests.java @@ -39,6 +39,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskAwareRequest; import org.opensearch.tasks.TaskId; diff --git a/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java index 72fabc001760f..0d1d30014a9ac 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -66,7 +66,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.MockBigArrays; diff --git a/test/framework/src/main/java/org/opensearch/node/MockNode.java b/test/framework/src/main/java/org/opensearch/node/MockNode.java index 7d3a88fb1aba2..59c78d32c4c3c 100644 --- a/test/framework/src/main/java/org/opensearch/node/MockNode.java +++ b/test/framework/src/main/java/org/opensearch/node/MockNode.java @@ -41,7 +41,7 @@ import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; diff --git a/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java index 1869fc8c9b447..943a99c02a00b 100644 --- a/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/opensearch/test/ExternalTestCluster.java @@ -45,7 +45,7 @@ import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import 
org.opensearch.core.common.transport.TransportAddress; import org.opensearch.env.Environment; import org.opensearch.http.HttpInfo; import org.opensearch.node.MockNode; diff --git a/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java b/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java index e156449adc184..841cf62620f0d 100644 --- a/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/MockHttpTransport.java @@ -33,8 +33,8 @@ package org.opensearch.test; import org.opensearch.common.lifecycle.AbstractLifecycleComponent; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.http.HttpInfo; import org.opensearch.http.HttpServerTransport; import org.opensearch.http.HttpStats; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 5e79bec91bd90..45d69703456fc 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -106,7 +106,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java index 14275d838e6a9..7b787e12be64a 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java @@ -89,7 +89,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateUtils; import org.opensearch.common.time.FormatNames; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; diff --git a/test/framework/src/main/java/org/opensearch/test/disruption/DisruptableMockTransport.java b/test/framework/src/main/java/org/opensearch/test/disruption/DisruptableMockTransport.java index dd025555d1ae8..1cd60690ca9d5 100644 --- a/test/framework/src/main/java/org/opensearch/test/disruption/DisruptableMockTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/disruption/DisruptableMockTransport.java @@ -39,8 +39,8 @@ import org.opensearch.common.Nullable; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.test.transport.MockTransport; import 
org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.CloseableConnection; @@ -52,7 +52,7 @@ import org.opensearch.transport.TransportInterceptor; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportService; import java.io.IOException; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java b/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java index 4d59afd5f99ed..9354d244e4c06 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/FakeTransport.java @@ -35,8 +35,8 @@ import org.opensearch.action.ActionListener; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.lifecycle.AbstractLifecycleComponent; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.transport.CloseableConnection; import org.opensearch.transport.ConnectionProfile; import org.opensearch.transport.Transport; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java b/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java index 36f3e6cb4b692..0974a5f1f5671 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java @@ -43,7 +43,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.CloseableConnection; import org.opensearch.transport.ClusterConnectionManager; @@ -54,7 +54,7 @@ import org.opensearch.transport.TransportMessageListener; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestOptions; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java index 7a1d730ac1b27..5cf451980ec98 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/MockTransportService.java @@ -46,8 +46,8 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import 
org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.concurrent.AbstractRunnable; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/StubbableConnectionManager.java b/test/framework/src/main/java/org/opensearch/test/transport/StubbableConnectionManager.java index 53cae12871d92..47de32acde351 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/StubbableConnectionManager.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/StubbableConnectionManager.java @@ -33,7 +33,7 @@ import org.opensearch.action.ActionListener; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.transport.ConnectTransportException; import org.opensearch.transport.ConnectionProfile; import org.opensearch.transport.ConnectionManager; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java b/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java index 8d66d481dc4aa..0aeb5f77af8c8 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/StubbableTransport.java @@ -37,8 +37,8 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.common.lifecycle.LifecycleListener; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.tasks.Task; import org.opensearch.transport.ConnectionProfile; import org.opensearch.transport.RequestHandlerRegistry; diff --git a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java index 29619a541722c..6beda60ad4145 100644 --- a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java @@ -58,12 +58,13 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.BoundTransportAddress; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.BoundTransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.node.Node; import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchTestCase; diff --git a/test/framework/src/main/java/org/opensearch/transport/TestResponse.java b/test/framework/src/main/java/org/opensearch/transport/TestResponse.java index 09dd50d656004..14db8b3372bf2 100644 --- a/test/framework/src/main/java/org/opensearch/transport/TestResponse.java +++ b/test/framework/src/main/java/org/opensearch/transport/TestResponse.java @@ -33,6 +33,7 @@ import 
org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.transport.TransportResponse; import java.io.IOException; diff --git a/test/framework/src/main/java/org/opensearch/transport/TestTransportChannel.java b/test/framework/src/main/java/org/opensearch/transport/TestTransportChannel.java index 819094c9fb089..64bb01afe3430 100644 --- a/test/framework/src/main/java/org/opensearch/transport/TestTransportChannel.java +++ b/test/framework/src/main/java/org/opensearch/transport/TestTransportChannel.java @@ -33,6 +33,7 @@ package org.opensearch.transport; import org.opensearch.action.ActionListener; +import org.opensearch.core.transport.TransportResponse; public class TestTransportChannel implements TransportChannel { diff --git a/test/framework/src/test/java/org/opensearch/test/disruption/DisruptableMockTransportTests.java b/test/framework/src/test/java/org/opensearch/test/disruption/DisruptableMockTransportTests.java index 9bddaf013aef4..46c1cfac5c5c5 100644 --- a/test/framework/src/test/java/org/opensearch/test/disruption/DisruptableMockTransportTests.java +++ b/test/framework/src/test/java/org/opensearch/test/disruption/DisruptableMockTransportTests.java @@ -39,7 +39,7 @@ import org.opensearch.common.collect.Tuple; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.node.Node; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.disruption.DisruptableMockTransport.ConnectionStatus; @@ -49,8 +49,8 @@ import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestHandler; -import org.opensearch.transport.TransportResponse; -import org.opensearch.transport.TransportResponse.Empty; +import org.opensearch.core.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse.Empty; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; import org.junit.Before; diff --git a/test/framework/src/test/java/org/opensearch/test/disruption/NetworkDisruptionIT.java b/test/framework/src/test/java/org/opensearch/test/disruption/NetworkDisruptionIT.java index a5112bc958954..427f217d07e2a 100644 --- a/test/framework/src/test/java/org/opensearch/test/disruption/NetworkDisruptionIT.java +++ b/test/framework/src/test/java/org/opensearch/test/disruption/NetworkDisruptionIT.java @@ -46,7 +46,7 @@ import org.opensearch.test.transport.MockTransportService; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportException; -import org.opensearch.transport.TransportResponse; +import org.opensearch.core.transport.TransportResponse; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; diff --git a/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java index 868affc81be37..158a770987207 100644 --- a/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/opensearch/transport/nio/SimpleMockNioTransportTests.java @@ -39,7 +39,7 @@ import org.opensearch.common.network.NetworkService; import 
org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.transport.TransportAddress; +import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.core.indices.breaker.NoneCircuitBreakerService; import org.opensearch.transport.AbstractSimpleTransportTestCase; From 5670d2a2be92602c8e8e38cc8fec7ddf5737ed52 Mon Sep 17 00:00:00 2001 From: Harish Bhakuni Date: Thu, 3 Aug 2023 00:36:45 -0700 Subject: [PATCH 50/75] [Snapshot Interop] Add Logic in Lock Manager to cleanup stale data post index deletion. (#8472) Signed-off-by: Harish Bhakuni --- .../RemoteStoreBaseIntegTestCase.java | 2 +- .../snapshots/DeleteSnapshotIT.java | 76 ++++++++++++++++++- .../store/RemoteSegmentStoreDirectory.java | 16 ++-- .../RemoteSegmentStoreDirectoryFactory.java | 4 +- .../RemoteStoreLockManagerFactory.java | 8 +- .../blobstore/BlobStoreRepository.java | 43 +++++++++-- .../blobstore/BlobStoreRepositoryTests.java | 2 - .../AbstractSnapshotIntegTestCase.java | 5 ++ 8 files changed, 138 insertions(+), 18 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java index 4a85ff46d9025..ec58c29175e16 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java @@ -159,7 +159,7 @@ public void teardown() { assertAcked(clusterAdmin().prepareDeleteRepository(REPOSITORY_2_NAME)); } - public int getFileCount(Path path) throws Exception { + public static int getFileCount(Path path) throws Exception { final AtomicInteger filesExisting = new AtomicInteger(0); Files.walkFileTree(path, new SimpleFileVisitor<>() { @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java index b12fbdd2a9bd7..d38620723a8f4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/DeleteSnapshotIT.java @@ -10,22 +10,28 @@ import org.opensearch.action.ActionFuture; import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.UUIDs; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.FeatureFlags; +import org.opensearch.remotestore.RemoteStoreBaseIntegTestCase; import org.opensearch.test.FeatureFlagSetter; import org.opensearch.test.OpenSearchIntegTestCase; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import static org.hamcrest.Matchers.is; import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; +import static org.hamcrest.Matchers.comparesEqualTo; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = 
OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) @@ -240,12 +246,13 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception { final String snapshotRepoName = "snapshot-repo-name"; final Path snapshotRepoPath = randomRepoPath(); createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy(snapshotRepoPath)); - final String testIndex = "index-test"; - createIndexWithContent(testIndex); final Path remoteStoreRepoPath = randomRepoPath(); createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath); + final String testIndex = "index-test"; + createIndexWithContent(testIndex); + final String remoteStoreEnabledIndexName = "remote-index-1"; final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); @@ -289,6 +296,71 @@ public void testDeleteMultipleShallowCopySnapshotsCase3() throws Exception { assert (getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME).length == 0); } + public void testRemoteStoreCleanupForDeletedIndex() throws Exception { + disableRepoConsistencyCheck("Remote store repository is being used in the test"); + FeatureFlagSetter.set(FeatureFlags.REMOTE_STORE); + + internalCluster().startClusterManagerOnlyNode(remoteStoreClusterSettings(REMOTE_REPO_NAME)); + internalCluster().startDataOnlyNode(); + final Client clusterManagerClient = internalCluster().clusterManagerClient(); + ensureStableCluster(2); + + final String snapshotRepoName = "snapshot-repo-name"; + final Path snapshotRepoPath = randomRepoPath(); + createRepository(snapshotRepoName, "mock", snapshotRepoSettingsForShallowCopy(snapshotRepoPath)); + + final Path remoteStoreRepoPath = randomRepoPath(); + createRepository(REMOTE_REPO_NAME, "fs", remoteStoreRepoPath); + + final String testIndex = "index-test"; + createIndexWithContent(testIndex); + + final String remoteStoreEnabledIndexName = "remote-index-1"; + final Settings remoteStoreEnabledIndexSettings = getRemoteStoreBackedIndexSettings(); + createIndex(remoteStoreEnabledIndexName, remoteStoreEnabledIndexSettings); + indexRandomDocs(remoteStoreEnabledIndexName, randomIntBetween(5, 10)); + + String indexUUID = client().admin() + .indices() + .prepareGetSettings(remoteStoreEnabledIndexName) + .get() + .getSetting(remoteStoreEnabledIndexName, IndexMetadata.SETTING_INDEX_UUID); + + logger.info("--> create two remote index shallow snapshots"); + List shallowCopySnapshots = createNSnapshots(snapshotRepoName, 2); + + String[] lockFiles = getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME); + assert (lockFiles.length == 2) : "lock files are " + Arrays.toString(lockFiles); + + // delete remote store index + assertAcked(client().admin().indices().prepareDelete(remoteStoreEnabledIndexName)); + + logger.info("--> delete snapshot 1"); + AcknowledgedResponse deleteSnapshotResponse = clusterManagerClient.admin() + .cluster() + .prepareDeleteSnapshot(snapshotRepoName, shallowCopySnapshots.get(0)) + .get(); + assertAcked(deleteSnapshotResponse); + + lockFiles = getLockFilesInRemoteStore(remoteStoreEnabledIndexName, REMOTE_REPO_NAME, indexUUID); + assert (lockFiles.length == 1) : "lock files are " + Arrays.toString(lockFiles); + + logger.info("--> delete snapshot 2"); + deleteSnapshotResponse = clusterManagerClient.admin() + .cluster() + .prepareDeleteSnapshot(snapshotRepoName, shallowCopySnapshots.get(1)) + .get(); + assertAcked(deleteSnapshotResponse); + + Path indexPath = 
Path.of(String.valueOf(remoteStoreRepoPath), indexUUID); + // Delete is async. Give time for it + assertBusy(() -> { + try { + assertThat(RemoteStoreBaseIntegTestCase.getFileCount(indexPath), comparesEqualTo(0)); + } catch (Exception e) {} + }, 30, TimeUnit.SECONDS); + } + private List createNSnapshots(String repoName, int count) { final List snapshotNames = new ArrayList<>(count); final String prefix = "snap-" + UUIDs.randomBase64UUID(random()).toLowerCase(Locale.ROOT) + "-"; diff --git a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java index 8dfdb3e2c8e06..9ac5ebb94a5ca 100644 --- a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java +++ b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java @@ -837,29 +837,36 @@ public void deleteStaleSegments(int lastNMetadataFilesToKeep) throws IOException } } + public void deleteStaleSegmentsAsync(int lastNMetadataFilesToKeep) { + deleteStaleSegmentsAsync(lastNMetadataFilesToKeep, ActionListener.wrap(r -> {}, e -> {})); + } + /** * Delete stale segment and metadata files asynchronously. * This method calls {@link RemoteSegmentStoreDirectory#deleteStaleSegments(int)} in an async manner. * @param lastNMetadataFilesToKeep number of metadata files to keep */ - public void deleteStaleSegmentsAsync(int lastNMetadataFilesToKeep) { + public void deleteStaleSegmentsAsync(int lastNMetadataFilesToKeep, ActionListener listener) { if (canDeleteStaleCommits.compareAndSet(true, false)) { try { threadPool.executor(ThreadPool.Names.REMOTE_PURGE).execute(() -> { try { deleteStaleSegments(lastNMetadataFilesToKeep); + listener.onResponse(null); } catch (Exception e) { - logger.info( + logger.error( "Exception while deleting stale commits from remote segment store, will retry delete post next commit", e ); + listener.onFailure(e); } finally { canDeleteStaleCommits.set(true); } }); } catch (Exception e) { - logger.info("Exception occurred while scheduling deleteStaleCommits", e); + logger.error("Exception occurred while scheduling deleteStaleCommits", e); canDeleteStaleCommits.set(true); + listener.onFailure(e); } } } @@ -891,7 +898,6 @@ private boolean deleteIfEmpty() throws IOException { } public void close() throws IOException { - deleteStaleSegmentsAsync(0); - deleteIfEmpty(); + deleteStaleSegmentsAsync(0, ActionListener.wrap(r -> deleteIfEmpty(), e -> logger.error("Failed to cleanup remote directory"))); } } diff --git a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryFactory.java b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryFactory.java index 3bec84f287ce4..3de7a706c0688 100644 --- a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryFactory.java +++ b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryFactory.java @@ -13,8 +13,8 @@ import org.opensearch.common.blobstore.BlobPath; import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.ShardPath; +import org.opensearch.index.store.lockmanager.RemoteStoreLockManager; import org.opensearch.index.store.lockmanager.RemoteStoreLockManagerFactory; -import org.opensearch.index.store.lockmanager.RemoteStoreMetadataLockManager; import org.opensearch.plugins.IndexStorePlugin; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.Repository; @@ -59,7 +59,7 @@ public Directory newDirectory(String 
repositoryName, String indexUUID, String sh RemoteDirectory dataDirectory = createRemoteDirectory(repository, commonBlobPath, "data"); RemoteDirectory metadataDirectory = createRemoteDirectory(repository, commonBlobPath, "metadata"); - RemoteStoreMetadataLockManager mdLockManager = RemoteStoreLockManagerFactory.newLockManager( + RemoteStoreLockManager mdLockManager = RemoteStoreLockManagerFactory.newLockManager( repositoriesService.get(), repositoryName, indexUUID, diff --git a/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreLockManagerFactory.java b/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreLockManagerFactory.java index e866551eae143..1a306f3261094 100644 --- a/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreLockManagerFactory.java +++ b/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreLockManagerFactory.java @@ -33,7 +33,7 @@ public RemoteStoreLockManagerFactory(Supplier repositoriesS this.repositoriesService = repositoriesService; } - public RemoteStoreMetadataLockManager newLockManager(String repositoryName, String indexUUID, String shardId) throws IOException { + public RemoteStoreLockManager newLockManager(String repositoryName, String indexUUID, String shardId) throws IOException { return newLockManager(repositoriesService.get(), repositoryName, indexUUID, shardId); } @@ -58,6 +58,12 @@ public static RemoteStoreMetadataLockManager newLockManager( } } + // TODO: remove this once we add poller in place to trigger remote store cleanup + // see: https://github.com/opensearch-project/OpenSearch/issues/8469 + public Supplier getRepositoriesService() { + return repositoriesService; + } + private static RemoteBufferedOutputDirectory createRemoteBufferedOutputDirectory( Repository repository, BlobPath commonBlobPath, diff --git a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java index 70db2e0c0a9bd..f24c255f0d0da 100644 --- a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java @@ -113,11 +113,12 @@ import org.opensearch.index.snapshots.blobstore.RateLimitingInputStream; import org.opensearch.index.snapshots.blobstore.SlicedInputStream; import org.opensearch.index.snapshots.blobstore.SnapshotFiles; +import org.opensearch.index.store.RemoteSegmentStoreDirectoryFactory; import org.opensearch.index.store.Store; import org.opensearch.index.store.StoreFileMetadata; import org.opensearch.index.store.lockmanager.FileLockInfo; +import org.opensearch.index.store.lockmanager.RemoteStoreLockManager; import org.opensearch.index.store.lockmanager.RemoteStoreLockManagerFactory; -import org.opensearch.index.store.lockmanager.RemoteStoreMetadataLockManager; import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.indices.recovery.RecoveryState; import org.opensearch.repositories.IndexId; @@ -616,7 +617,7 @@ public void cloneRemoteStoreIndexShardSnapshot( RemoteStoreShardShallowCopySnapshot remStoreBasedShardMetadata = (RemoteStoreShardShallowCopySnapshot) indexShardSnapshot; String indexUUID = remStoreBasedShardMetadata.getIndexUUID(); String remoteStoreRepository = remStoreBasedShardMetadata.getRemoteStoreRepository(); - RemoteStoreMetadataLockManager remoteStoreMetadataLockManger = remoteStoreLockManagerFactory.newLockManager( + RemoteStoreLockManager 
remoteStoreMetadataLockManger = remoteStoreLockManagerFactory.newLockManager( remoteStoreRepository, indexUUID, String.valueOf(shardId.shardId()) @@ -1072,11 +1073,24 @@ private void executeStaleShardDelete( // Releasing lock file before deleting the shallow-snap-UUID file because in case of any failure while // releasing the lock file, we would still have the shallow-snap-UUID file and that would be used during // next delete operation for releasing this lock file - RemoteStoreMetadataLockManager remoteStoreMetadataLockManager = remoteStoreLockManagerFactory - .newLockManager(remoteStoreRepoForIndex, indexUUID, shardId); + RemoteStoreLockManager remoteStoreMetadataLockManager = remoteStoreLockManagerFactory.newLockManager( + remoteStoreRepoForIndex, + indexUUID, + shardId + ); remoteStoreMetadataLockManager.release( FileLockInfo.getLockInfoBuilder().withAcquirerId(snapshotUUID).build() ); + if (!isIndexPresent(clusterService, indexUUID)) { + // this is a temporary solution where snapshot deletion triggers remote store side + // cleanup if index is already deleted. We will add a poller in future to take + // care of remote store side cleanup. + // see https://github.com/opensearch-project/OpenSearch/issues/8469 + new RemoteSegmentStoreDirectoryFactory( + remoteStoreLockManagerFactory.getRepositoriesService(), + threadPool + ).newDirectory(remoteStoreRepoForIndex, indexUUID, shardId).close(); + } } } } @@ -1487,6 +1501,15 @@ private void cleanupStaleIndices( } } + private static boolean isIndexPresent(ClusterService clusterService, String indexUUID) { + for (final IndexMetadata indexMetadata : clusterService.state().metadata().getIndices().values()) { + if (indexUUID.equals(indexMetadata.getIndexUUID())) { + return true; + } + } + return false; + } + private void executeOneStaleIndexDelete( BlockingQueue> staleIndicesToDelete, RemoteStoreLockManagerFactory remoteStoreLockManagerFactory, @@ -1519,11 +1542,21 @@ private void executeOneStaleIndexDelete( // Releasing lock files before deleting the shallow-snap-UUID file because in case of any failure // while releasing the lock file, we would still have the corresponding shallow-snap-UUID file // and that would be used during next delete operation for releasing this stale lock file - RemoteStoreMetadataLockManager remoteStoreMetadataLockManager = remoteStoreLockManagerFactory + RemoteStoreLockManager remoteStoreMetadataLockManager = remoteStoreLockManagerFactory .newLockManager(remoteStoreRepoForIndex, indexUUID, shardBlob.getKey()); remoteStoreMetadataLockManager.release( FileLockInfo.getLockInfoBuilder().withAcquirerId(snapshotUUID).build() ); + if (!isIndexPresent(clusterService, indexUUID)) { + // this is a temporary solution where snapshot deletion triggers remote store side + // cleanup if index is already deleted. We will add a poller in future to take + // care of remote store side cleanup. 
+ // see https://github.com/opensearch-project/OpenSearch/issues/8469 + new RemoteSegmentStoreDirectoryFactory( + remoteStoreLockManagerFactory.getRepositoriesService(), + threadPool + ).newDirectory(remoteStoreRepoForIndex, indexUUID, shardBlob.getKey()).close(); + } } } } diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java index 26082f2456867..ee9181dba4563 100644 --- a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -166,8 +166,6 @@ public void testRetrieveSnapshots() throws Exception { assertThat(snapshotIds, equalTo(originalSnapshots)); } - // Validate Scenario remoteStoreShallowCopy Snapshot -> remoteStoreShallowCopy Snapshot - // -> remoteStoreShallowCopy Snapshot -> normal snapshot public void testReadAndWriteSnapshotsThroughIndexFile() throws Exception { final BlobStoreRepository repository = setupRepo(); final long pendingGeneration = repository.metadata.pendingGeneration(); diff --git a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java index 2c6f4f7b15e5d..9c3f342e58111 100644 --- a/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -557,6 +557,11 @@ protected String[] getLockFilesInRemoteStore(String remoteStoreIndex, String rem .prepareGetSettings(remoteStoreIndex) .get() .getSetting(remoteStoreIndex, IndexMetadata.SETTING_INDEX_UUID); + return getLockFilesInRemoteStore(remoteStoreIndex, remoteStoreRepositoryName, indexUUID); + } + + protected String[] getLockFilesInRemoteStore(String remoteStoreIndex, String remoteStoreRepositoryName, String indexUUID) + throws IOException { final RepositoriesService repositoriesService = internalCluster().getCurrentClusterManagerNodeInstance(RepositoriesService.class); final BlobStoreRepository remoteStoreRepository = (BlobStoreRepository) repositoriesService.repository(remoteStoreRepositoryName); BlobPath shardLevelBlobPath = remoteStoreRepository.basePath().add(indexUUID).add("0").add("segments").add("lock_files"); From e55dadef57809a7fc04dac5ff4d43873beeefadc Mon Sep 17 00:00:00 2001 From: Gaurav Bafna <85113518+gbbafna@users.noreply.github.com> Date: Thu, 3 Aug 2023 16:47:20 +0530 Subject: [PATCH 51/75] Avoid duplicate indexing in case of SegRep enabled indices' translog replay (#8578) Signed-off-by: Gaurav Bafna --- .../recovery/IndexPrimaryRelocationIT.java | 14 +-- .../RemoteIndexPrimaryRelocationIT.java | 66 ++++++++++++++ .../opensearch/remotestore/RemoteStoreIT.java | 3 + .../ReplicaToPrimaryPromotionIT.java | 64 ++++++++++++++ .../index/engine/InternalEngine.java | 28 ++++-- .../index/shard/IndexShardTests.java | 27 +++--- ...dTests.java => RemoteIndexShardTests.java} | 39 ++++++++- ...overyWithRemoteTranslogOnPrimaryTests.java | 12 +-- .../SegmentReplicationIndexShardTests.java | 40 ++++++++- .../TranslogTransferManagerTests.java | 5 +- ...enSearchIndexLevelReplicationTestCase.java | 17 +--- .../index/shard/IndexShardTestCase.java | 86 ++++++++++++++----- 12 files changed, 325 insertions(+), 76 deletions(-) create mode 100644 
server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java rename server/src/test/java/org/opensearch/index/shard/{SegmentReplicationWithRemoteIndexShardTests.java => RemoteIndexShardTests.java} (78%) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index 32a10451a0dd3..e9962706bcd39 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -56,14 +56,16 @@ public class IndexPrimaryRelocationIT extends OpenSearchIntegTestCase { private static final int RELOCATION_COUNT = 15; + public void setup() {} + + public Settings indexSettings() { + return Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0).build(); + } + public void testPrimaryRelocationWhileIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(2, 3)); - client().admin() - .indices() - .prepareCreate("test") - .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .setMapping("field", "type=text") - .get(); + setup(); + client().admin().indices().prepareCreate("test").setSettings(indexSettings()).setMapping("field", "type=text").get(); ensureGreen("test"); AtomicInteger numAutoGenDocs = new AtomicInteger(); final AtomicBoolean finished = new AtomicBoolean(false); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java new file mode 100644 index 0000000000000..a9482c8c19187 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java @@ -0,0 +1,66 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.remotestore; + +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.indices.recovery.IndexPrimaryRelocationIT; +import org.opensearch.indices.replication.common.ReplicationType; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.nio.file.Path; + +import static org.opensearch.remotestore.RemoteStoreBaseIntegTestCase.remoteStoreClusterSettings; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) +public class RemoteIndexPrimaryRelocationIT extends IndexPrimaryRelocationIT { + + protected static final String REPOSITORY_NAME = "test-remote-store-repo"; + + protected Path absolutePath; + + public void setup() { + absolutePath = randomRepoPath().toAbsolutePath(); + assertAcked( + clusterAdmin().preparePutRepository(REPOSITORY_NAME).setType("fs").setSettings(Settings.builder().put("location", absolutePath)) + ); + } + + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put(remoteStoreClusterSettings(REPOSITORY_NAME, REPOSITORY_NAME, false)) + .build(); + } + + @Override + protected boolean addMockInternalEngine() { + return false; + } + + public Settings indexSettings() { + return Settings.builder() + .put(super.indexSettings()) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .build(); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder() + .put(super.featureFlagSettings()) + .put(FeatureFlags.REMOTE_STORE, "true") + .put(FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL, "true") + .build(); + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java index 693c4113f8f3b..51ae37c3b4faa 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreIT.java @@ -40,6 +40,9 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.comparesEqualTo; +import static org.hamcrest.Matchers.comparesEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java index 549b4985894a7..c73e7f603b09b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java @@ -11,17 +11,22 @@ import com.carrotsearch.randomizedtesting.RandomizedTest; import org.junit.Before; import org.opensearch.action.admin.indices.close.CloseIndexResponse; +import org.opensearch.action.index.IndexResponse; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; 
import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.routing.IndexShardRoutingTable; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.test.BackgroundIndexer; import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import java.util.Locale; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -116,4 +121,63 @@ public void testPromoteReplicaToPrimary() throws Exception { refresh(indexName); assertHitCount(client().prepareSearch(indexName).setSize(0).get(), numOfDocs); } + + public void testFailoverWhileIndexing() throws Exception { + internalCluster().startNode(); + internalCluster().startNode(); + final String indexName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + shard_count = scaledRandomIntBetween(1, 5); + createIndex(indexName); + ensureGreen(indexName); + int docCount = scaledRandomIntBetween(20, 50); + final int indexDocAfterFailover = scaledRandomIntBetween(20, 50); + AtomicInteger numAutoGenDocs = new AtomicInteger(); + CountDownLatch latch = new CountDownLatch(1); + final AtomicBoolean finished = new AtomicBoolean(false); + Thread indexingThread = new Thread(() -> { + int docsAfterFailover = 0; + while (finished.get() == false && numAutoGenDocs.get() < docCount) { + IndexResponse indexResponse = internalCluster().clusterManagerClient() + .prepareIndex(indexName) + .setSource("field", numAutoGenDocs.get()) + .get(); + + if (indexResponse.status() == RestStatus.CREATED || indexResponse.status() == RestStatus.ACCEPTED) { + numAutoGenDocs.incrementAndGet(); + if (numAutoGenDocs.get() == docCount / 2) { + if (random().nextInt(3) == 0) { + refresh(indexName); + } else if (random().nextInt(2) == 0) { + flush(indexName); + } + // Node is killed on this + latch.countDown(); + } else if (numAutoGenDocs.get() > docCount / 2) { + docsAfterFailover++; + if (docsAfterFailover == indexDocAfterFailover) { + finished.set(true); + } + } + } + } + logger.debug("Done indexing"); + }); + indexingThread.start(); + latch.await(); + + ClusterState state = client(internalCluster().getClusterManagerName()).admin().cluster().prepareState().get().getState(); + final int numShards = state.metadata().index(indexName).getNumberOfShards(); + final ShardRouting primaryShard = state.routingTable().index(indexName).shard(randomIntBetween(0, numShards - 1)).primaryShard(); + final DiscoveryNode randomNode = state.nodes().resolveNode(primaryShard.currentNodeId()); + + // stop the random data node, all remaining shards are promoted to primaries + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(randomNode.getName())); + ensureYellowAndNoInitializingShards(indexName); + indexingThread.join(); + refresh(indexName); + assertHitCount( + client(internalCluster().getClusterManagerName()).prepareSearch(indexName).setSize(0).setTrackTotalHits(true).get(), + numAutoGenDocs.get() + ); + } } diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 77d63dfaade54..028298f662e7b 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -710,6 
+710,7 @@ private OpVsLuceneDocStatus compareOpToLuceneDocBasedOnSeqNo(final Operation op) final OpVsLuceneDocStatus status; VersionValue versionValue = getVersionFromMap(op.uid().bytes()); assert incrementVersionLookup(); + boolean segRepEnabled = engineConfig.getIndexSettings().isSegRepEnabled(); if (versionValue != null) { status = compareOpToVersionMapOnSeqNo(op.id(), op.seqNo(), op.primaryTerm(), versionValue); } else { @@ -722,10 +723,8 @@ private OpVsLuceneDocStatus compareOpToLuceneDocBasedOnSeqNo(final Operation op) } else if (op.seqNo() > docAndSeqNo.seqNo) { status = OpVsLuceneDocStatus.OP_NEWER; } else if (op.seqNo() == docAndSeqNo.seqNo) { - assert localCheckpointTracker.hasProcessed(op.seqNo()) : "local checkpoint tracker is not updated seq_no=" - + op.seqNo() - + " id=" - + op.id(); + assert localCheckpointTracker.hasProcessed(op.seqNo()) || segRepEnabled + : "local checkpoint tracker is not updated seq_no=" + op.seqNo() + " id=" + op.id(); status = OpVsLuceneDocStatus.OP_STALE_OR_EQUAL; } else { status = OpVsLuceneDocStatus.OP_STALE_OR_EQUAL; @@ -927,6 +926,7 @@ public IndexResult index(Index index) throws IOException { plan.currentNotFoundOrDeleted ); } + } if (index.origin().isFromTranslog() == false) { final Translog.Location location; @@ -1005,10 +1005,18 @@ protected final IndexingStrategy planIndexingAsNonPrimary(Index index) throws IO assert maxSeqNoOfUpdatesOrDeletes < index.seqNo() : index.seqNo() + ">=" + maxSeqNoOfUpdatesOrDeletes; plan = IndexingStrategy.optimizedAppendOnly(index.version(), 0); } else { + boolean segRepEnabled = engineConfig.getIndexSettings().isSegRepEnabled(); versionMap.enforceSafeAccess(); final OpVsLuceneDocStatus opVsLucene = compareOpToLuceneDocBasedOnSeqNo(index); if (opVsLucene == OpVsLuceneDocStatus.OP_STALE_OR_EQUAL) { - plan = IndexingStrategy.processAsStaleOp(index.version()); + if (segRepEnabled) { + // For segrep based indices, we can't completely rely on localCheckpointTracker + // as the preserved checkpoint may not have all the operations present in lucene + // we don't need to index it again as stale op as it would create multiple documents for same seq no + plan = IndexingStrategy.processButSkipLucene(false, index.version()); + } else { + plan = IndexingStrategy.processAsStaleOp(index.version()); + } } else { plan = IndexingStrategy.processNormally(opVsLucene == OpVsLuceneDocStatus.LUCENE_DOC_NOT_FOUND, index.version(), 0); } @@ -1442,9 +1450,17 @@ protected final DeletionStrategy planDeletionAsNonPrimary(Delete delete) throws // See testRecoveryWithOutOfOrderDelete for an example of peer recovery plan = DeletionStrategy.processButSkipLucene(false, delete.version()); } else { + boolean segRepEnabled = engineConfig.getIndexSettings().isSegRepEnabled(); final OpVsLuceneDocStatus opVsLucene = compareOpToLuceneDocBasedOnSeqNo(delete); if (opVsLucene == OpVsLuceneDocStatus.OP_STALE_OR_EQUAL) { - plan = DeletionStrategy.processAsStaleOp(delete.version()); + if (segRepEnabled) { + // For segrep based indices, we can't completely rely on localCheckpointTracker + // as the preserved checkpoint may not have all the operations present in lucene + // we don't need to index it again as stale op as it would create multiple documents for same seq no + plan = DeletionStrategy.processButSkipLucene(false, delete.version()); + } else { + plan = DeletionStrategy.processAsStaleOp(delete.version()); + } } else { plan = DeletionStrategy.processNormally(opVsLucene == OpVsLuceneDocStatus.LUCENE_DOC_NOT_FOUND, delete.version(), 0); } diff --git 
a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index 96fa53fbf0fc2..f1ae4b16d9bad 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -36,8 +36,8 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Term; import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.Term; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; @@ -50,8 +50,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; import org.junit.Assert; -import org.opensearch.common.io.PathUtils; -import org.opensearch.core.Assertions; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -77,11 +75,12 @@ import org.opensearch.common.Randomness; import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; -import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.collect.Tuple; import org.opensearch.common.concurrent.GatedCloseable; +import org.opensearch.common.io.PathUtils; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.common.lease.Releasable; +import org.opensearch.common.lease.Releasables; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -90,8 +89,9 @@ import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.common.lease.Releasable; -import org.opensearch.common.lease.Releasables; +import org.opensearch.core.Assertions; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; @@ -106,8 +106,8 @@ import org.opensearch.index.engine.EngineTestCase; import org.opensearch.index.engine.InternalEngine; import org.opensearch.index.engine.InternalEngineFactory; -import org.opensearch.index.engine.NRTReplicationEngineFactory; import org.opensearch.index.engine.NRTReplicationEngine; +import org.opensearch.index.engine.NRTReplicationEngineFactory; import org.opensearch.index.engine.ReadOnlyEngine; import org.opensearch.index.fielddata.FieldDataStats; import org.opensearch.index.fielddata.IndexFieldData; @@ -168,6 +168,7 @@ import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -192,7 +193,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; -import java.util.Collection; + import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -217,8 +218,8 @@ import static org.mockito.Mockito.mock; import static 
org.opensearch.cluster.routing.TestShardRouting.newShardRouting; import static org.opensearch.common.lucene.Lucene.cleanLuceneIndex; -import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.opensearch.test.hamcrest.RegexMatcher.matches; @@ -2886,13 +2887,14 @@ public void testCommitLevelRestoreShardFromRemoteStore() throws IOException { } public void testRestoreShardFromRemoteStore(boolean performFlush) throws IOException { + String remoteStorePath = createTempDir().toString(); IndexShard target = newStartedShard( true, Settings.builder() .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put(IndexMetadata.SETTING_REMOTE_STORE_ENABLED, true) - .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, "temp-fs") - .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, "temp-fs") + .put(IndexMetadata.SETTING_REMOTE_SEGMENT_STORE_REPOSITORY, remoteStorePath + "__test") + .put(IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY, remoteStorePath + "__test") .build(), new InternalEngineFactory() ); @@ -2957,7 +2959,6 @@ public void testRestoreShardFromRemoteStore(boolean performFlush) throws IOExcep final PlainActionFuture future = PlainActionFuture.newFuture(); target.restoreFromRemoteStore(future); target.remoteStore().decRef(); - assertTrue(future.actionGet()); assertDocs(target, "1", "2"); diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithRemoteIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java similarity index 78% rename from server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithRemoteIndexShardTests.java rename to server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java index b15d8b66fca55..a01169480de0b 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationWithRemoteIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java @@ -19,10 +19,12 @@ import java.io.IOException; import java.util.List; +import java.util.concurrent.CountDownLatch; import static org.hamcrest.Matchers.equalTo; +import static org.opensearch.index.engine.EngineTestCase.assertAtMostOneLuceneDocumentPerSequenceNumber; -public class SegmentReplicationWithRemoteIndexShardTests extends SegmentReplicationIndexShardTests { +public class RemoteIndexShardTests extends SegmentReplicationIndexShardTests { private static final String REPOSITORY_NAME = "temp-fs"; private static final Settings settings = Settings.builder() @@ -135,4 +137,39 @@ public void testNRTReplicaWithRemoteStorePromotedAsPrimary(boolean performFlushF } } } + + public void testNoDuplicateSeqNo() throws Exception { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build(); + ReplicationGroup shards = createGroup(1, settings, indexMapping, new NRTReplicationEngineFactory(), createTempDir()); + final IndexShard primaryShard = shards.getPrimary(); + final IndexShard replicaShard = shards.getReplicas().get(0); + shards.startPrimary(); + shards.startAll(); + shards.indexDocs(10); + replicateSegments(primaryShard, shards.getReplicas()); + + flushShard(primaryShard); + shards.indexDocs(10); + replicateSegments(primaryShard, shards.getReplicas()); + + 
shards.indexDocs(10); + primaryShard.refresh("test"); + replicateSegments(primaryShard, shards.getReplicas()); + + CountDownLatch latch = new CountDownLatch(1); + shards.promoteReplicaToPrimary(replicaShard, (shard, listener) -> { + try { + assertAtMostOneLuceneDocumentPerSequenceNumber(replicaShard.getEngine()); + } catch (IOException e) { + throw new RuntimeException(e); + } + latch.countDown(); + }); + latch.await(); + for (IndexShard shard : shards) { + if (shard != null) { + closeShard(shard, false); + } + } + } } diff --git a/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java b/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java index 20b3dfc0f93a6..d71a290c9619a 100644 --- a/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java +++ b/server/src/test/java/org/opensearch/index/shard/ReplicaRecoveryWithRemoteTranslogOnPrimaryTests.java @@ -23,7 +23,6 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.replication.OpenSearchIndexLevelReplicationTestCase; import org.opensearch.index.seqno.SequenceNumbers; -import org.opensearch.index.store.Store; import org.opensearch.index.translog.WriteOnlyTranslogManager; import org.opensearch.indices.recovery.RecoveryTarget; import org.opensearch.indices.replication.common.ReplicationType; @@ -76,15 +75,6 @@ public void testStartSequenceForReplicaRecovery() throws Exception { int moreDocs = shards.indexDocs(randomIntBetween(20, 100)); shards.flush(); - - final ShardRouting replicaRouting2 = newShardRouting( - replicaRouting.shardId(), - replicaRouting.currentNodeId(), - false, - ShardRoutingState.INITIALIZING, - RecoverySource.PeerRecoverySource.INSTANCE - ); - Store remoteStore = createRemoteStore(remoteDir, replicaRouting2, newIndexMetadata); IndexShard newReplicaShard = newShard( newShardRouting( replicaRouting.shardId(), @@ -102,7 +92,7 @@ public void testStartSequenceForReplicaRecovery() throws Exception { replica.getGlobalCheckpointSyncer(), replica.getRetentionLeaseSyncer(), EMPTY_EVENT_LISTENER, - remoteStore + remoteDir ); shards.addReplica(newReplicaShard); AtomicBoolean assertDone = new AtomicBoolean(false); diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java index 12b7341349442..9c02f430e4e6d 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java @@ -23,12 +23,12 @@ import org.opensearch.cluster.routing.ShardRoutingHelper; import org.opensearch.common.collect.Tuple; import org.opensearch.common.concurrent.GatedCloseable; +import org.opensearch.common.lease.Releasable; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.common.lease.Releasable; import org.opensearch.index.IndexSettings; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.InternalEngineFactory; @@ -50,8 +50,8 @@ import org.opensearch.indices.replication.SegmentReplicationState; import org.opensearch.indices.replication.SegmentReplicationTarget; import 
org.opensearch.indices.replication.SegmentReplicationTargetService; -import org.opensearch.indices.replication.checkpoint.SegmentReplicationCheckpointPublisher; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.indices.replication.checkpoint.SegmentReplicationCheckpointPublisher; import org.opensearch.indices.replication.common.CopyState; import org.opensearch.indices.replication.common.ReplicationFailedException; import org.opensearch.indices.replication.common.ReplicationListener; @@ -82,6 +82,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.opensearch.index.engine.EngineTestCase.assertAtMostOneLuceneDocumentPerSequenceNumber; public class SegmentReplicationIndexShardTests extends OpenSearchIndexLevelReplicationTestCase { @@ -673,6 +674,41 @@ protected SegmentReplicationTargetService newTargetService(SegmentReplicationSou ); } + public void testNoDuplicateSeqNo() throws Exception { + Settings settings = Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build(); + ReplicationGroup shards = createGroup(1, settings, indexMapping, new NRTReplicationEngineFactory(), createTempDir()); + final IndexShard primaryShard = shards.getPrimary(); + final IndexShard replicaShard = shards.getReplicas().get(0); + shards.startPrimary(); + shards.startAll(); + shards.indexDocs(10); + replicateSegments(primaryShard, shards.getReplicas()); + + flushShard(primaryShard); + shards.indexDocs(10); + replicateSegments(primaryShard, shards.getReplicas()); + + shards.indexDocs(10); + primaryShard.refresh("test"); + replicateSegments(primaryShard, shards.getReplicas()); + + CountDownLatch latch = new CountDownLatch(1); + shards.promoteReplicaToPrimary(replicaShard, (shard, listener) -> { + try { + assertAtMostOneLuceneDocumentPerSequenceNumber(replicaShard.getEngine()); + } catch (IOException e) { + throw new RuntimeException(e); + } + latch.countDown(); + }); + latch.await(); + for (IndexShard shard : shards) { + if (shard != null) { + closeShard(shard, false); + } + } + } + /** * Assert persisted and searchable doc counts. This method should not be used while docs are concurrently indexed because * it asserts point in time seqNos are relative to the doc counts. 
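Taken together, the engine-side change in this patch reduces to one decision: whether a replayed translog operation should be written to Lucene when the engine considers it stale or equal to what is already indexed. Below is a simplified, self-contained sketch of that decision; the enum and class names are illustrative stand-ins, not the engine's actual private OpVsLuceneDocStatus/IndexingStrategy types, and the real planIndexingAsNonPrimary/planDeletionAsNonPrimary paths also handle versioning and append-only optimizations.

// Simplified model of the replay decision for segment-replication enabled indices.
enum OpStatus { OP_NEWER, OP_STALE_OR_EQUAL, LUCENE_DOC_NOT_FOUND }

final class ReplayPlanner {
    /** Returns true when the replayed operation should be written to Lucene. */
    static boolean shouldWriteToLucene(OpStatus status, boolean segRepEnabled) {
        if (status == OpStatus.OP_STALE_OR_EQUAL) {
            // With segment replication, copied segments may already contain this operation even
            // though the local checkpoint tracker never processed it. Re-indexing it as a stale op
            // would leave two Lucene documents with the same sequence number, so the translog is
            // advanced but Lucene is skipped.
            return !segRepEnabled;
        }
        return true; // newer ops, or ops whose doc is missing, are indexed normally
    }
}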
diff --git a/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java b/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java index 3d622d6bdf8b8..99937faa18584 100644 --- a/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java +++ b/server/src/test/java/org/opensearch/index/translog/transfer/TranslogTransferManagerTests.java @@ -16,8 +16,8 @@ import org.opensearch.common.blobstore.BlobMetadata; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.blobstore.BlobStore; -import org.opensearch.common.blobstore.support.PlainBlobMetadata; import org.opensearch.common.blobstore.stream.write.WritePriority; +import org.opensearch.common.blobstore.support.PlainBlobMetadata; import org.opensearch.core.index.Index; import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.translog.Translog; @@ -44,7 +44,6 @@ import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyMap; import static org.mockito.ArgumentMatchers.anySet; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; @@ -432,7 +431,7 @@ public void testDeleteStaleTranslogMetadata() { .listAllInSortedOrderAsync( eq(ThreadPool.Names.REMOTE_PURGE), any(BlobPath.class), - anyString(), + eq(TranslogTransferMetadata.METADATA_PREFIX), anyInt(), any(ActionListener.class) ); diff --git a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java index 278847e56e65f..e8c5203129374 100644 --- a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java @@ -75,12 +75,12 @@ import org.opensearch.common.collect.Iterators; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.common.lease.Releasable; +import org.opensearch.common.lease.Releasables; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.common.lease.Releasable; -import org.opensearch.common.lease.Releasables; import org.opensearch.core.index.Index; import org.opensearch.index.IndexSettings; import org.opensearch.index.engine.DocIdSeqNoAndSource; @@ -98,7 +98,6 @@ import org.opensearch.index.shard.PrimaryReplicaSyncer; import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.shard.ShardPath; -import org.opensearch.index.store.Store; import org.opensearch.index.translog.Translog; import org.opensearch.indices.recovery.RecoveryState; import org.opensearch.indices.recovery.RecoveryTarget; @@ -253,10 +252,6 @@ protected ReplicationGroup(final IndexMetadata indexMetadata) throws IOException protected ReplicationGroup(final IndexMetadata indexMetadata, Path remotePath) throws IOException { final ShardRouting primaryRouting = this.createShardRouting("s0", true); - Store remoteStore = null; - if (remotePath != null) { - remoteStore = createRemoteStore(remotePath, primaryRouting, indexMetadata); - } 
primary = newShard( primaryRouting, indexMetadata, @@ -264,7 +259,7 @@ protected ReplicationGroup(final IndexMetadata indexMetadata, Path remotePath) t getEngineFactory(primaryRouting), () -> {}, retentionLeaseSyncer, - remoteStore + remotePath ); replicas = new CopyOnWriteArrayList<>(); this.indexMetadata = indexMetadata; @@ -390,10 +385,6 @@ public IndexShard addReplica() throws IOException { public IndexShard addReplica(Path remotePath) throws IOException { final ShardRouting replicaRouting = createShardRouting("s" + replicaId.incrementAndGet(), false); - Store remoteStore = null; - if (remotePath != null) { - remoteStore = createRemoteStore(remotePath, replicaRouting, indexMetadata); - } final IndexShard replica = newShard( replicaRouting, indexMetadata, @@ -401,7 +392,7 @@ public IndexShard addReplica(Path remotePath) throws IOException { getEngineFactory(replicaRouting), () -> {}, retentionLeaseSyncer, - remoteStore + remotePath ); addReplica(replica); return replica; diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 66e5459cfea3b..29ecc6b376ad0 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -48,6 +48,7 @@ import org.opensearch.action.support.replication.TransportReplicationAction; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.MappingMetadata; +import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodeRole; import org.opensearch.cluster.routing.IndexShardRoutingTable; @@ -66,7 +67,6 @@ import org.opensearch.common.blobstore.BlobStore; import org.opensearch.common.blobstore.fs.FsBlobContainer; import org.opensearch.common.blobstore.fs.FsBlobStore; -import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.PathUtils; import org.opensearch.common.lease.Releasable; @@ -77,9 +77,12 @@ import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; +import org.opensearch.env.TestEnvironment; import org.opensearch.index.IndexSettings; import org.opensearch.index.MapperTestUtils; import org.opensearch.index.VersionType; @@ -142,7 +145,9 @@ import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.Repository; import org.opensearch.repositories.blobstore.BlobStoreRepository; +import org.opensearch.repositories.blobstore.BlobStoreTestUtil; import org.opensearch.repositories.blobstore.OpenSearchBlobStoreRepositoryIntegTestCase; +import org.opensearch.repositories.fs.FsRepository; import org.opensearch.snapshots.Snapshot; import org.opensearch.test.DummyShardLock; import org.opensearch.test.OpenSearchTestCase; @@ -460,7 +465,7 @@ protected IndexShard newShard( @Nullable EngineFactory engineFactory, Runnable globalCheckpointSyncer, RetentionLeaseSyncer retentionLeaseSyncer, - Store remoteStore, + Path path, 
IndexingOperationListener... listeners ) throws IOException { // add node id as name to settings for proper logging @@ -478,7 +483,7 @@ protected IndexShard newShard( globalCheckpointSyncer, retentionLeaseSyncer, EMPTY_EVENT_LISTENER, - remoteStore, + path, listeners ); } @@ -506,7 +511,7 @@ protected IndexShard newShard( Runnable globalCheckpointSyncer, RetentionLeaseSyncer retentionLeaseSyncer, IndexEventListener indexEventListener, - Store remoteStore, + Path remotePath, IndexingOperationListener... listeners ) throws IOException { return newShard( @@ -521,7 +526,7 @@ protected IndexShard newShard( retentionLeaseSyncer, indexEventListener, SegmentReplicationCheckpointPublisher.EMPTY, - remoteStore, + remotePath, listeners ); } @@ -550,7 +555,7 @@ protected IndexShard newShard( RetentionLeaseSyncer retentionLeaseSyncer, IndexEventListener indexEventListener, SegmentReplicationCheckpointPublisher checkpointPublisher, - @Nullable Store remoteStore, + @Nullable Path remotePath, IndexingOperationListener... listeners ) throws IOException { final Settings nodeSettings = Settings.builder().put("node.name", routing.currentNodeId()).build(); @@ -578,26 +583,32 @@ protected IndexShard newShard( Collections.emptyList(), clusterSettings ); - + Store remoteStore = null; RemoteRefreshSegmentPressureService remoteRefreshSegmentPressureService = null; + RepositoriesService mockRepoSvc = mock(RepositoriesService.class); + if (indexSettings.isRemoteStoreEnabled()) { - if (remoteStore == null) { - Path remoteStorePath; - String remoteStoreRepository = indexSettings.getRemoteStoreRepository(); - if (remoteStoreRepository != null && remoteStoreRepository.endsWith("__test")) { - remoteStorePath = PathUtils.get(remoteStoreRepository.replace("__test", "")); - } else { - remoteStorePath = createTempDir(); - } - remoteStore = createRemoteStore(remoteStorePath, routing, indexMetadata); + String remoteStoreRepository = indexSettings.getRemoteStoreRepository(); + // remote path via setting a repository . This is a hack used for shards are created using reset . + // since we can't get remote path from IndexShard directly, we are using repository to store it . 
+ if (remoteStoreRepository != null && remoteStoreRepository.endsWith("__test")) { + remotePath = PathUtils.get(remoteStoreRepository.replace("__test", "")); + } else if (remotePath == null) { + remotePath = createTempDir(); } + + remoteStore = createRemoteStore(remotePath, routing, indexMetadata); + remoteRefreshSegmentPressureService = new RemoteRefreshSegmentPressureService(clusterService, indexSettings.getSettings()); + BlobStoreRepository repo = createRepository(remotePath); + when(mockRepoSvc.repository(any())).thenAnswer(invocationOnMock -> repo); } final BiFunction translogFactorySupplier = (settings, shardRouting) -> { if (settings.isRemoteTranslogStoreEnabled() && shardRouting.primary()) { + return new RemoteBlobStoreInternalTranslogFactory( - this::createRepositoriesService, + () -> mockRepoSvc, threadPool, settings.getRemoteStoreTranslogRepository() ); @@ -643,6 +654,39 @@ protected IndexShard newShard( return indexShard; } + private BlobStoreRepository createRepository(Path path) { + Settings settings = Settings.builder().put("location", path).build(); + RepositoryMetadata repositoryMetadata = new RepositoryMetadata(randomAlphaOfLength(10), FsRepository.TYPE, settings); + final ClusterService clusterService = BlobStoreTestUtil.mockClusterService(repositoryMetadata); + final FsRepository repository = new FsRepository( + repositoryMetadata, + createEnvironment(path), + xContentRegistry(), + clusterService, + new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) + ) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo manually + } + }; + clusterService.addStateApplier(event -> repository.updateState(event.state())); + // Apply state once to initialize repo properly like RepositoriesService would + repository.updateState(clusterService.state()); + repository.start(); + return repository; + } + + private Environment createEnvironment(Path path) { + Path home = createTempDir(); + return TestEnvironment.newEnvironment( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), home.toAbsolutePath()) + .put(Environment.PATH_REPO_SETTING.getKey(), path.toAbsolutePath()) + .build() + ); + } + protected RepositoriesService createRepositoriesService() { RepositoriesService repositoriesService = Mockito.mock(RepositoriesService.class); BlobStoreRepository repository = Mockito.mock(BlobStoreRepository.class); @@ -724,7 +768,7 @@ protected IndexShard reinitShard(IndexShard current, ShardRouting routing, Index current.indexSettings.getIndexMetadata(), current.engineFactory, current.engineConfigFactory, - current.remoteStore(), + null, listeners ); } @@ -743,7 +787,7 @@ protected IndexShard reinitShard( IndexMetadata indexMetadata, EngineFactory engineFactory, EngineConfigFactory engineConfigFactory, - Store remoteStore, + Path remotePath, IndexingOperationListener... 
listeners ) throws IOException { closeShards(current); @@ -758,7 +802,7 @@ protected IndexShard reinitShard( current.getGlobalCheckpointSyncer(), current.getRetentionLeaseSyncer(), EMPTY_EVENT_LISTENER, - remoteStore, + remotePath, listeners ); } From ff6a8851e64b09f9fd21a960ed19eb6eacf06e2f Mon Sep 17 00:00:00 2001 From: Ashish Date: Thu, 3 Aug 2023 20:02:43 +0530 Subject: [PATCH 52/75] Fix flaky test testStatsOnShardUnassigned in RemoteStoreStatsIT (#9057) --------- Signed-off-by: Ashish Singh --- .../opensearch/remotestore/RemoteStoreStatsIT.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java index 1c7f14701b3e7..bd546a01b0b88 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java @@ -9,6 +9,7 @@ package org.opensearch.remotestore; import org.junit.Before; +import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest; import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStats; import org.opensearch.action.admin.cluster.remotestore.stats.RemoteStoreStatsRequestBuilder; @@ -22,6 +23,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexSettings; import org.opensearch.index.remote.RemoteSegmentTransferTracker; +import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import java.io.IOException; @@ -443,12 +445,19 @@ public void testStatsOnShardUnassigned() throws IOException { createIndex(INDEX_NAME, remoteStoreIndexSettings(2, 1)); ensureGreen(INDEX_NAME); indexDocs(); - int dataNodeCountBeforeStop = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes(); - internalCluster().stopRandomDataNode(); + ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().get(); + int dataNodeCountBeforeStop = clusterHealthResponse.getNumberOfDataNodes(); + int nodeCount = clusterHealthResponse.getNumberOfNodes(); + String nodeToBeStopped = randomBoolean() ? primaryNodeName(INDEX_NAME) : replicaNodeName(INDEX_NAME); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodeToBeStopped)); + ensureYellowAndNoInitializingShards(INDEX_NAME); + ensureStableCluster(nodeCount - 1); RemoteStoreStatsResponse response = client().admin().cluster().prepareRemoteStoreStats(INDEX_NAME, "0").get(); int dataNodeCountAfterStop = client().admin().cluster().prepareHealth().get().getNumberOfDataNodes(); assertEquals(dataNodeCountBeforeStop, response.getTotalShards()); assertEquals(dataNodeCountAfterStop, response.getSuccessfulShards()); + // Indexing docs to ensure that the primary has started + indexSingleDoc(INDEX_NAME); } public void testStatsOnRemoteStoreRestore() throws IOException { From c3acf47b4d643c3a3ab86dc3b07fe722ac6e4982 Mon Sep 17 00:00:00 2001 From: Marc Handalian Date: Thu, 3 Aug 2023 09:47:30 -0700 Subject: [PATCH 53/75] Fix test testDropPrimaryDuringReplication and clean up ReplicationCheckpoint validation (#8889) * Fix test testDropPrimaryDuringReplication and clean up ReplicationCheckpoint validation. This test is now occasionally failing with replicas having 0 documents. This occurs in a couple of ways: 1. 
After dropping the old primary, the new primary is not publishing a checkpoint to replicas unless it indexes docs from translog after flipping to primary mode. If there is nothing to index, it will not publish a checkpoint, but the other replica could have never sync'd with the original primary and be left out of date. - This PR fixes this by force publishing a checkpoint after the new primary flips to primary mode. 2. The replica receives a checkpoint post failover and cancels its sync with the former primary that is still active, recognizing a primary term bump. However, this cancellation is async and immediately starting a new replication event could fail as it's still replicating. - This PR fixes this by attempting to process the latest received checkpoint on failure, if the shard is not failed and still behind. This PR also introduces a few changes to ensure the accuracy of the ReplicationCheckpoint tracked on primary & replicas. - Ensure the checkpoint stored in SegmentReplicationTarget is the checkpoint passed from the primary and not locally computed. This ensures checks for primary term are accurate and not using a locally computed operationPrimaryTerm. - Introduces a refresh listener for both primary & replica to update the ReplicationCheckpoint and store it in replicationTracker post refresh rather than redundantly computing when accessed. - Removes unnecessary onCheckpointPublished method used to start replication timers manually. This will happen automatically on primaries once its local cp is updated. Signed-off-by: Marc Handalian * Handle NoSuchFileException when attempting to delete decref'd files. To avoid divergent logic with remote store, we always incref/decref the segmentinfos.files(true) which includes the segments_n file. Decref to 0 will attempt to delete the file from the store and it's possible this _n file does not yet exist. This change ignores the failure if we get a NoSuchFileException while attempting to delete. Signed-off-by: Marc Handalian * Add more unit tests. Signed-off-by: Marc Handalian * Clean up IndexShardTests.testCheckpointRefreshListenerWithNull Signed-off-by: Marc Handalian * Remove unnecessary catch for NoSuchFileException. Signed-off-by: Marc Handalian * Add another test for non-segrep. Signed-off-by: Marc Handalian * PR Feedback. Signed-off-by: Marc Handalian * re-compute replication checkpoint on primary promotion. 
Signed-off-by: Marc Handalian --------- Signed-off-by: Marc Handalian --- .../replication/SegmentReplicationIT.java | 10 +- .../index/engine/NRTReplicationEngine.java | 15 +++ .../opensearch/index/shard/IndexShard.java | 69 +++++++---- .../shard/RemoteStoreRefreshListener.java | 5 +- .../replication/SegmentReplicationTarget.java | 20 +++- .../SegmentReplicationTargetService.java | 107 +++++++++++------- ...SegmentReplicationCheckpointPublisher.java | 1 - .../engine/NRTReplicationEngineTests.java | 72 ++++++++++++ .../index/shard/IndexShardTests.java | 51 +-------- .../SegmentReplicationIndexShardTests.java | 79 ++++++++++++- ...licationWithNodeToNodeIndexShardTests.java | 16 ++- .../SegmentReplicationTargetServiceTests.java | 90 +++++++++------ .../SegmentReplicationTargetTests.java | 14 +-- .../recovery/ReplicationCollectionTests.java | 2 + .../index/shard/IndexShardTestCase.java | 60 +++++++++- 15 files changed, 435 insertions(+), 176 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java index 08186bf3f9362..72b6a0296e3bb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java @@ -44,6 +44,8 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.cluster.routing.allocation.command.CancelAllocationCommand; +import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.settings.Settings; @@ -60,6 +62,7 @@ import org.opensearch.index.shard.IndexShard; import org.opensearch.core.index.shard.ShardId; import org.opensearch.indices.recovery.FileChunkRequest; +import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.search.SearchService; import org.opensearch.search.builder.PointInTimeBuilder; @@ -983,8 +986,11 @@ public void testScrollCreatedOnReplica() throws Exception { ) ); final IndexShard replicaShard = getIndexShard(replica, INDEX_NAME); - final SegmentInfos segmentInfos = replicaShard.getLatestSegmentInfosAndCheckpoint().v1().get(); - final Collection snapshottedSegments = segmentInfos.files(false); + final Tuple, ReplicationCheckpoint> tuple = replicaShard.getLatestSegmentInfosAndCheckpoint(); + final Collection snapshottedSegments; + try (final GatedCloseable closeable = tuple.v1()) { + snapshottedSegments = closeable.get().files(false); + } // opens a scrolled query before a flush is called. 
// this is for testing scroll segment consistency between refresh and flush SearchResponse searchResponse = client(replica).prepareSearch() diff --git a/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java b/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java index e852658d7b3ba..6b09b8d86dc6c 100644 --- a/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/NRTReplicationEngine.java @@ -34,6 +34,7 @@ import java.io.Closeable; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; @@ -445,6 +446,20 @@ protected SegmentInfos getLatestSegmentInfos() { return readerManager.getSegmentInfos(); } + @Override + public synchronized GatedCloseable getSegmentInfosSnapshot() { + // get reference to latest infos + final SegmentInfos latestSegmentInfos = getLatestSegmentInfos(); + // incref all files + try { + final Collection files = latestSegmentInfos.files(false); + store.incRefFileDeleter(files); + return new GatedCloseable<>(latestSegmentInfos, () -> store.decRefFileDeleter(files)); + } catch (IOException e) { + throw new EngineException(shardId, e.getMessage(), e); + } + } + protected LocalCheckpointTracker getLocalCheckpointTracker() { return localCheckpointTracker; } diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 2b85193275a13..ace6ed56c007c 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -677,8 +677,19 @@ public void updateShardState( // this Shard's engine was read only, we need to update its engine before restoring local history from xlog. assert newRouting.primary() && currentRouting.primary() == false; resetEngineToGlobalCheckpoint(); + // It is possible an engine can open with a SegmentInfos on a higher gen but the reader does not refresh to + // trigger our refresh listener. + // Force update the checkpoint post engine reset. + updateReplicationCheckpoint(); } + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); + if (indexSettings.isSegRepEnabled()) { + // force publish a checkpoint once in primary mode so that replicas not caught up to previous primary + // are brought up to date. 
+ checkpointPublisher.publish(this, getLatestReplicationCheckpoint()); + } + ensurePeerRecoveryRetentionLeasesExist(); /* * If this shard was serving as a replica shard when another shard was promoted to primary then @@ -1551,15 +1562,7 @@ public GatedCloseable acquireSafeIndexCommit() throws EngineExcepti * @return EMPTY checkpoint before the engine is opened and null for non-segrep enabled indices */ public ReplicationCheckpoint getLatestReplicationCheckpoint() { - final Tuple, ReplicationCheckpoint> infosAndCheckpoint = getLatestSegmentInfosAndCheckpoint(); - if (infosAndCheckpoint == null) { - return null; - } - try (final GatedCloseable ignored = infosAndCheckpoint.v1()) { - return infosAndCheckpoint.v2(); - } catch (IOException e) { - throw new OpenSearchException("Error Closing SegmentInfos Snapshot", e); - } + return replicationTracker.getLatestReplicationCheckpoint(); } /** @@ -1573,13 +1576,11 @@ public ReplicationCheckpoint getLatestReplicationCheckpoint() { * */ public Tuple, ReplicationCheckpoint> getLatestSegmentInfosAndCheckpoint() { - if (indexSettings.isSegRepEnabled() == false) { - return null; - } + assert indexSettings.isSegRepEnabled(); Tuple, ReplicationCheckpoint> nullSegmentInfosEmptyCheckpoint = new Tuple<>( new GatedCloseable<>(null, () -> {}), - ReplicationCheckpoint.empty(shardId, getDefaultCodecName()) + getLatestReplicationCheckpoint() ); if (getEngineOrNull() == null) { @@ -1598,11 +1599,7 @@ public Tuple, ReplicationCheckpoint> getLatestSegme getOperationPrimaryTerm(), segmentInfos.getGeneration(), segmentInfos.getVersion(), - // TODO: Update replicas to compute length from SegmentInfos. Replicas do not yet incref segments with - // getSegmentInfosSnapshot, so computing length from SegmentInfos can cause issues. - shardRouting.primary() - ? store.getSegmentMetadataMap(segmentInfos).values().stream().mapToLong(StoreFileMetadata::length).sum() - : store.stats(StoreStats.UNKNOWN_RESERVED_BYTES).getSizeInBytes(), + store.getSegmentMetadataMap(segmentInfos).values().stream().mapToLong(StoreFileMetadata::length).sum(), getEngine().config().getCodec().getName() ) ); @@ -1858,10 +1855,6 @@ public void resetToWriteableEngine() throws IOException, InterruptedException, T indexShardOperationPermits.blockOperations(30, TimeUnit.MINUTES, () -> { resetEngineToGlobalCheckpoint(); }); } - public void onCheckpointPublished(ReplicationCheckpoint checkpoint) { - replicationTracker.setLatestReplicationCheckpoint(checkpoint); - } - /** * Wrapper for a non-closing reader * @@ -2342,6 +2335,11 @@ private void innerOpenEngineAndTranslog(LongSupplier globalCheckpointSupplier, b final Engine newEngine = engineFactory.newReadWriteEngine(config); onNewEngine(newEngine); currentEngineReference.set(newEngine); + + if (indexSettings.isSegRepEnabled()) { + // set initial replication checkpoints into tracker. + updateReplicationCheckpoint(); + } // We set active because we are now writing operations to the engine; this way, // we can flush if we go idle after some time and become inactive. 
active.set(true); @@ -3667,6 +3665,9 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) thro internalRefreshListener.clear(); internalRefreshListener.add(new RefreshMetricUpdater(refreshMetric)); + if (indexSettings.isSegRepEnabled()) { + internalRefreshListener.add(new ReplicationCheckpointUpdater()); + } if (this.checkpointPublisher != null && shardRouting.primary() && indexSettings.isSegRepLocalEnabled()) { internalRefreshListener.add(new CheckpointRefreshListener(this, this.checkpointPublisher)); } @@ -4471,6 +4472,30 @@ public void afterRefresh(boolean didRefresh) throws IOException { } } + /** + * Refresh listener to update the Shard's ReplicationCheckpoint post refresh. + */ + private class ReplicationCheckpointUpdater implements ReferenceManager.RefreshListener { + @Override + public void beforeRefresh() throws IOException {} + + @Override + public void afterRefresh(boolean didRefresh) throws IOException { + if (didRefresh) { + updateReplicationCheckpoint(); + } + } + } + + private void updateReplicationCheckpoint() { + final Tuple, ReplicationCheckpoint> tuple = getLatestSegmentInfosAndCheckpoint(); + try (final GatedCloseable ignored = tuple.v1()) { + replicationTracker.setLatestReplicationCheckpoint(tuple.v2()); + } catch (IOException e) { + throw new OpenSearchException("Error Closing SegmentInfos Snapshot", e); + } + } + private EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() { final RootObjectMapper.Builder noopRootMapper = new RootObjectMapper.Builder("__noop"); final DocumentMapper noopDocumentMapper = mapperService != null diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java index 8dd0c8b9d4405..d56054dd1c42b 100644 --- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java +++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java @@ -185,7 +185,6 @@ private synchronized boolean syncSegments() { return true; } ReplicationCheckpoint checkpoint = indexShard.getLatestReplicationCheckpoint(); - indexShard.onCheckpointPublished(checkpoint); beforeSegmentsSync(); long refreshTimeMs = segmentTracker.getLocalRefreshTimeMs(), refreshClockTimeMs = segmentTracker.getLocalRefreshClockTimeMs(); long refreshSeqNo = segmentTracker.getLocalRefreshSeqNo(); @@ -207,6 +206,10 @@ private synchronized boolean syncSegments() { try (GatedCloseable segmentInfosGatedCloseable = indexShard.getSegmentInfosSnapshot()) { SegmentInfos segmentInfos = segmentInfosGatedCloseable.get(); + assert segmentInfos.getGeneration() == checkpoint.getSegmentsGen() : "SegmentInfos generation: " + + segmentInfos.getGeneration() + + " does not match metadata generation: " + + checkpoint.getSegmentsGen(); // Capture replication checkpoint before uploading the segments as upload can take some time and checkpoint can // move. 
long lastRefreshedCheckpoint = ((InternalEngine) indexShard.getEngine()).lastRefreshedCheckpoint(); diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java index c22701dfc94ce..3a84163bb979d 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java @@ -51,9 +51,14 @@ public class SegmentReplicationTarget extends ReplicationTarget { public final static String REPLICATION_PREFIX = "replication."; - public SegmentReplicationTarget(IndexShard indexShard, SegmentReplicationSource source, ReplicationListener listener) { + public SegmentReplicationTarget( + IndexShard indexShard, + ReplicationCheckpoint checkpoint, + SegmentReplicationSource source, + ReplicationListener listener + ) { super("replication_target", indexShard, new ReplicationLuceneIndex(), listener); - this.checkpoint = indexShard.getLatestReplicationCheckpoint(); + this.checkpoint = checkpoint; this.source = source; this.state = new SegmentReplicationState( indexShard.routingEntry(), @@ -90,12 +95,19 @@ public SegmentReplicationState state() { } public SegmentReplicationTarget retryCopy() { - return new SegmentReplicationTarget(indexShard, source, listener); + return new SegmentReplicationTarget(indexShard, checkpoint, source, listener); } @Override public String description() { - return String.format(Locale.ROOT, "Id:[%d] Shard:[%s] Source:[%s]", getId(), shardId(), source.getDescription()); + return String.format( + Locale.ROOT, + "Id:[%d] Checkpoint [%s] Shard:[%s] Source:[%s]", + getId(), + getCheckpoint(), + shardId(), + source.getDescription() + ); } @Override diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java index b41c9e09add45..84d6a722e572e 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java @@ -234,7 +234,7 @@ public synchronized void onNewCheckpoint(final ReplicationCheckpoint receivedChe logger.trace( () -> new ParameterizedMessage( "Ignoring new replication checkpoint - shard is currently replicating to checkpoint {}", - replicaShard.getLatestReplicationCheckpoint() + ongoingReplicationTarget.getCheckpoint() ) ); return; @@ -242,7 +242,7 @@ public synchronized void onNewCheckpoint(final ReplicationCheckpoint receivedChe } final Thread thread = Thread.currentThread(); if (replicaShard.shouldProcessCheckpoint(receivedCheckpoint)) { - startReplication(replicaShard, new SegmentReplicationListener() { + startReplication(replicaShard, receivedCheckpoint, new SegmentReplicationListener() { @Override public void onReplicationDone(SegmentReplicationState state) { logger.trace( @@ -280,6 +280,8 @@ public void onReplicationFailure( ); if (sendShardFailure == true) { failShard(e, replicaShard); + } else { + processLatestReceivedCheckpoint(replicaShard, thread); } } }); @@ -396,8 +398,24 @@ protected void updateLatestReceivedCheckpoint(ReplicationCheckpoint receivedChec } } - public SegmentReplicationTarget startReplication(final IndexShard indexShard, final SegmentReplicationListener listener) { - final SegmentReplicationTarget target = new 
SegmentReplicationTarget(indexShard, sourceFactory.get(indexShard), listener); + /** + * Start a round of replication and sync to at least the given checkpoint. + * @param indexShard - {@link IndexShard} replica shard + * @param checkpoint - {@link ReplicationCheckpoint} checkpoint to sync to + * @param listener - {@link ReplicationListener} + * @return {@link SegmentReplicationTarget} target event orchestrating the event. + */ + public SegmentReplicationTarget startReplication( + final IndexShard indexShard, + final ReplicationCheckpoint checkpoint, + final SegmentReplicationListener listener + ) { + final SegmentReplicationTarget target = new SegmentReplicationTarget( + indexShard, + checkpoint, + sourceFactory.get(indexShard), + listener + ); startReplication(target); return target; } @@ -529,50 +547,59 @@ private void forceReplication(ForceSyncRequest request, ActionListener new ParameterizedMessage( + "[shardId {}] [replication id {}] Force replication Sync complete to {}, timing data: {}", + shardId, + state.getReplicationId(), + indexShard.getLatestReplicationCheckpoint(), + state.getTimingData() + ) + ); + // Promote engine type for primary target + if (indexShard.recoveryState().getPrimary() == true) { + indexShard.resetToWriteableEngine(); + } else { + // Update the replica's checkpoint on primary's replication tracker. + updateVisibleCheckpoint(state.getReplicationId(), indexShard); + } + listener.onResponse(TransportResponse.Empty.INSTANCE); + } catch (Exception e) { + logger.error("Error while marking replication completed", e); + listener.onFailure(e); + } + } + + @Override + public void onReplicationFailure( + SegmentReplicationState state, + ReplicationFailedException e, + boolean sendShardFailure + ) { + logger.error( () -> new ParameterizedMessage( - "[shardId {}] [replication id {}] Force replication Sync complete to {}, timing data: {}", - shardId, + "[shardId {}] [replication id {}] Force replication Sync failed, timing data: {}", + indexShard.shardId().getId(), state.getReplicationId(), - indexShard.getLatestReplicationCheckpoint(), state.getTimingData() - ) + ), + e ); - // Promote engine type for primary target - if (indexShard.recoveryState().getPrimary() == true) { - indexShard.resetToWriteableEngine(); - } else { - // Update the replica's checkpoint on primary's replication tracker. 
- updateVisibleCheckpoint(state.getReplicationId(), indexShard); + if (sendShardFailure) { + failShard(e, indexShard); } - listener.onResponse(TransportResponse.Empty.INSTANCE); - } catch (Exception e) { - logger.error("Error while marking replication completed", e); listener.onFailure(e); } } - - @Override - public void onReplicationFailure(SegmentReplicationState state, ReplicationFailedException e, boolean sendShardFailure) { - logger.error( - () -> new ParameterizedMessage( - "[shardId {}] [replication id {}] Replication failed, timing data: {}", - indexShard.shardId().getId(), - state.getReplicationId(), - state.getTimingData() - ), - e - ); - if (sendShardFailure) { - failShard(e, indexShard); - } - listener.onFailure(e); - } - }); + ); } } diff --git a/server/src/main/java/org/opensearch/indices/replication/checkpoint/SegmentReplicationCheckpointPublisher.java b/server/src/main/java/org/opensearch/indices/replication/checkpoint/SegmentReplicationCheckpointPublisher.java index b4bcdc92e539a..f5cb32b741862 100644 --- a/server/src/main/java/org/opensearch/indices/replication/checkpoint/SegmentReplicationCheckpointPublisher.java +++ b/server/src/main/java/org/opensearch/indices/replication/checkpoint/SegmentReplicationCheckpointPublisher.java @@ -34,7 +34,6 @@ public SegmentReplicationCheckpointPublisher(PublishAction publishAction) { public void publish(IndexShard indexShard, ReplicationCheckpoint checkpoint) { publishAction.publish(indexShard, checkpoint); - indexShard.onCheckpointPublished(checkpoint); } /** diff --git a/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java index 64fe42493c686..4c87df48f583f 100644 --- a/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NRTReplicationEngineTests.java @@ -12,7 +12,9 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.search.ReferenceManager; +import org.apache.lucene.store.IOContext; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.UUIDs; import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; @@ -28,6 +30,8 @@ import java.io.IOException; import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; @@ -367,4 +371,72 @@ private NRTReplicationEngine buildNrtReplicaEngine(AtomicLong globalCheckpoint, private NRTReplicationEngine buildNrtReplicaEngine(AtomicLong globalCheckpoint, Store store) throws IOException { return buildNrtReplicaEngine(globalCheckpoint, store, defaultSettings); } + + public void testGetSegmentInfosSnapshotPreservesFilesUntilRelease() throws Exception { + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); + + // TODO: Remove this divergent commit logic and copy Segments_N from primary with node-node. + // randomly toggle commit / no commit. 
+ IndexSettings settings = REMOTE_STORE_INDEX_SETTINGS; + final boolean shouldCommit = randomBoolean(); + if (shouldCommit) { + settings = INDEX_SETTINGS; + } + try ( + final Store nrtEngineStore = createStore(REMOTE_STORE_INDEX_SETTINGS, newDirectory()); + final NRTReplicationEngine nrtEngine = buildNrtReplicaEngine(globalCheckpoint, nrtEngineStore, settings) + ) { + // only index 2 docs here, this will create segments _0 and _1 and after forcemerge into _2. + final int docCount = 2; + List operations = generateHistoryOnReplica(docCount, randomBoolean(), randomBoolean(), randomBoolean()); + for (Engine.Operation op : operations) { + applyOperation(engine, op); + applyOperation(nrtEngine, op); + // refresh to create a lot of segments. + engine.refresh("test"); + } + assertEquals(2, engine.segmentsStats(false, false).getCount()); + // wipe the nrt directory initially so we can sync with primary. + Lucene.cleanLuceneIndex(nrtEngineStore.directory()); + assertFalse( + Arrays.stream(nrtEngineStore.directory().listAll()) + .anyMatch(file -> file.equals("write.lock") == false && file.equals("extra0") == false) + ); + for (String file : engine.getLatestSegmentInfos().files(true)) { + nrtEngineStore.directory().copyFrom(store.directory(), file, file, IOContext.DEFAULT); + } + nrtEngine.updateSegments(engine.getLatestSegmentInfos()); + assertEquals(engine.getLatestSegmentInfos(), nrtEngine.getLatestSegmentInfos()); + final GatedCloseable snapshot = nrtEngine.getSegmentInfosSnapshot(); + final Collection replicaSnapshotFiles = snapshot.get().files(false); + List replicaFiles = List.of(nrtEngine.store.directory().listAll()); + + // merge primary down to 1 segment + engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); + // we expect a 3rd segment to be created after merge. + assertEquals(3, engine.segmentsStats(false, false).getCount()); + final Collection latestPrimaryFiles = engine.getLatestSegmentInfos().files(false); + + // copy new segments in and load reader. + for (String file : latestPrimaryFiles) { + if (replicaFiles.contains(file) == false) { + nrtEngineStore.directory().copyFrom(store.directory(), file, file, IOContext.DEFAULT); + } + } + nrtEngine.updateSegments(engine.getLatestSegmentInfos()); + + replicaFiles = List.of(nrtEngine.store.directory().listAll()); + assertTrue(replicaFiles.containsAll(replicaSnapshotFiles)); + + // close snapshot, files should be cleaned up + snapshot.close(); + + replicaFiles = List.of(nrtEngine.store.directory().listAll()); + assertFalse(replicaFiles.containsAll(replicaSnapshotFiles)); + + // Ensure we still have all the active files. Note - we exclude the infos file here if we aren't committing + // the nrt reader will still reference segments_n-1 after being loaded until a local commit occurs. 
+ assertTrue(replicaFiles.containsAll(nrtEngine.getLatestSegmentInfos().files(shouldCommit))); + } + } } diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index f1ae4b16d9bad..8ffeb5d689f55 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -3753,7 +3753,7 @@ public void testReadSnapshotConcurrently() throws IOException, InterruptedExcept */ public void testCheckpointRefreshListener() throws IOException { final SegmentReplicationCheckpointPublisher mock = mock(SegmentReplicationCheckpointPublisher.class); - IndexShard shard = newStartedShard(p -> newShard(mock), true); + IndexShard shard = newStartedShard(p -> newShard(true, mock), true); List refreshListeners = shard.getEngine().config().getInternalRefreshListener(); assertTrue(refreshListeners.stream().anyMatch(e -> e instanceof CheckpointRefreshListener)); closeShards(shard); @@ -3763,58 +3763,13 @@ public void testCheckpointRefreshListener() throws IOException { * here we are passing null in place of SegmentReplicationCheckpointPublisher and testing on index shard if CheckpointRefreshListener is not added to the InternalrefreshListerners List */ public void testCheckpointRefreshListenerWithNull() throws IOException { - IndexShard shard = newStartedShard(p -> newShard(null), true); + final SegmentReplicationCheckpointPublisher publisher = null; + IndexShard shard = newStartedShard(p -> newShard(true, publisher), true); List refreshListeners = shard.getEngine().config().getInternalRefreshListener(); assertFalse(refreshListeners.stream().anyMatch(e -> e instanceof CheckpointRefreshListener)); closeShards(shard); } - /** - * creates a new initializing shard. The shard will be put in its proper path under the - * current node id the shard is assigned to. 
- * @param checkpointPublisher Segment Replication Checkpoint Publisher to publish checkpoint - */ - private IndexShard newShard(SegmentReplicationCheckpointPublisher checkpointPublisher) throws IOException { - final ShardId shardId = new ShardId("index", "_na_", 0); - final ShardRouting shardRouting = TestShardRouting.newShardRouting( - shardId, - randomAlphaOfLength(10), - true, - ShardRoutingState.INITIALIZING, - RecoverySource.EmptyStoreRecoverySource.INSTANCE - ); - final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); - ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); - - Settings indexSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_REPLICATION_TYPE, "SEGMENT") - .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000)) - .put(Settings.EMPTY) - .build(); - IndexMetadata metadata = IndexMetadata.builder(shardRouting.getIndexName()) - .settings(indexSettings) - .primaryTerm(0, primaryTerm) - .putMapping("{ \"properties\": {} }") - .build(); - return newShard( - shardRouting, - shardPath, - metadata, - null, - null, - new InternalEngineFactory(), - new EngineConfigFactory(new IndexSettings(metadata, metadata.getSettings())), - () -> {}, - RetentionLeaseSyncer.EMPTY, - EMPTY_EVENT_LISTENER, - checkpointPublisher, - null - ); - } - public void testIndexCheckOnStartup() throws Exception { final IndexShard indexShard = newStartedShard(true); diff --git a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java index 9c02f430e4e6d..58ae4b404e69c 100644 --- a/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/SegmentReplicationIndexShardTests.java @@ -19,6 +19,7 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.routing.IndexShardRoutingTable; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingHelper; import org.opensearch.common.collect.Tuple; @@ -63,6 +64,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -423,7 +425,70 @@ public void testShardIdleWithNoReplicas() throws Exception { /** * here we are starting a new primary shard in PrimaryMode and testing if the shard publishes checkpoint after refresh. 
*/ - public void testPublishCheckpointOnPrimaryMode() throws IOException { + public void testPublishCheckpointOnPrimaryMode() throws IOException, InterruptedException { + final SegmentReplicationCheckpointPublisher mock = mock(SegmentReplicationCheckpointPublisher.class); + IndexShard shard = newStartedShard(p -> newShard(false, mock, settings), false); + + final ShardRouting shardRouting = shard.routingEntry(); + promoteReplica( + shard, + Collections.singleton(shardRouting.allocationId().getId()), + new IndexShardRoutingTable.Builder(shardRouting.shardId()).addShard(shardRouting).build() + ); + + final CountDownLatch latch = new CountDownLatch(1); + shard.acquirePrimaryOperationPermit(new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + releasable.close(); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException(e); + } + }, ThreadPool.Names.GENERIC, ""); + + latch.await(); + // verify checkpoint is published + verify(mock, times(1)).publish(any(), any()); + closeShards(shard); + } + + public void testPublishCheckpointOnPrimaryMode_segrep_off() throws IOException, InterruptedException { + final SegmentReplicationCheckpointPublisher mock = mock(SegmentReplicationCheckpointPublisher.class); + final Settings settings = Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT).build(); + IndexShard shard = newStartedShard(p -> newShard(false, mock, settings), false); + + final ShardRouting shardRouting = shard.routingEntry(); + promoteReplica( + shard, + Collections.singleton(shardRouting.allocationId().getId()), + new IndexShardRoutingTable.Builder(shardRouting.shardId()).addShard(shardRouting).build() + ); + + final CountDownLatch latch = new CountDownLatch(1); + shard.acquirePrimaryOperationPermit(new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + releasable.close(); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException(e); + } + }, ThreadPool.Names.GENERIC, ""); + + latch.await(); + // verify checkpoint is published + verify(mock, times(0)).publish(any(), any()); + closeShards(shard); + } + + public void testPublishCheckpointPostFailover() throws IOException { final SegmentReplicationCheckpointPublisher mock = mock(SegmentReplicationCheckpointPublisher.class); IndexShard shard = newStartedShard(true); CheckpointRefreshListener refreshListener = new CheckpointRefreshListener(shard, mock); @@ -482,7 +547,7 @@ public void testRejectCheckpointOnShardRoutingPrimary() throws IOException { spy.onNewCheckpoint(new ReplicationCheckpoint(primaryShard.shardId(), 0L, 0L, 0L, Codec.getDefault().getName()), spyShard); // Verify that checkpoint is not processed as shard routing is primary. 
- verify(spy, times(0)).startReplication(any(), any()); + verify(spy, times(0)).startReplication(any(), any(), any()); closeShards(primaryShard); } @@ -656,7 +721,7 @@ public void cancel() { } }; when(sourceFactory.get(any())).thenReturn(source); - startReplicationAndAssertCancellation(replica, targetService); + startReplicationAndAssertCancellation(replica, primary, targetService); shards.removeReplica(replica); closeShards(replica); @@ -736,11 +801,15 @@ protected void resolveCheckpointInfoResponseListener(ActionListener() { @@ -422,7 +422,11 @@ public void testTemporaryFilesNotCleanup() throws Exception { runnablePostGetFiles ); when(sourceFactory.get(any())).thenReturn(segmentReplicationSource); - targetService.startReplication(replica, getTargetListener(primaryShard, replica, primaryMetadata, countDownLatch)); + targetService.startReplication( + replica, + primaryShard.getLatestReplicationCheckpoint(), + getTargetListener(primaryShard, replica, primaryMetadata, countDownLatch) + ); countDownLatch.await(30, TimeUnit.SECONDS); assertEquals("Replication failed", 0, countDownLatch.getCount()); shards.assertAllEqual(numDocs); diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java index efb8dda201e87..4a04a64196918 100644 --- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java @@ -84,6 +84,7 @@ public class SegmentReplicationTargetServiceTests extends IndexShardTestCase { private IndicesService indicesService; private SegmentReplicationState state; + private ReplicationCheckpoint initialCheckpoint; private static final long TRANSPORT_TIMEOUT = 30000;// 30sec @@ -130,7 +131,7 @@ public void setUp() throws Exception { when(clusterState.nodes()).thenReturn(DiscoveryNodes.builder().add(localNode).build()); sut = prepareForReplication(primaryShard, replicaShard, transportService, indicesService, clusterService); - ReplicationCheckpoint initialCheckpoint = replicaShard.getLatestReplicationCheckpoint(); + initialCheckpoint = primaryShard.getLatestReplicationCheckpoint(); aheadCheckpoint = new ReplicationCheckpoint( initialCheckpoint.getShardId(), initialCheckpoint.getPrimaryTerm(), @@ -165,19 +166,23 @@ public void tearDown() throws Exception { public void testsSuccessfulReplication_listenerCompletes() throws InterruptedException { CountDownLatch latch = new CountDownLatch(1); - sut.startReplication(replicaShard, new SegmentReplicationTargetService.SegmentReplicationListener() { - @Override - public void onReplicationDone(SegmentReplicationState state) { - assertEquals(SegmentReplicationState.Stage.DONE, state.getStage()); - latch.countDown(); - } + sut.startReplication( + replicaShard, + primaryShard.getLatestReplicationCheckpoint(), + new SegmentReplicationTargetService.SegmentReplicationListener() { + @Override + public void onReplicationDone(SegmentReplicationState state) { + assertEquals(SegmentReplicationState.Stage.DONE, state.getStage()); + latch.countDown(); + } - @Override - public void onReplicationFailure(SegmentReplicationState state, ReplicationFailedException e, boolean sendShardFailure) { - logger.error("Unexpected error", e); - Assert.fail("Test should succeed"); + @Override + public void onReplicationFailure(SegmentReplicationState state, ReplicationFailedException e, boolean 
sendShardFailure) { + logger.error("Unexpected error", e); + Assert.fail("Test should succeed"); + } } - }); + ); latch.await(2, TimeUnit.SECONDS); assertEquals(0, latch.getCount()); } @@ -209,6 +214,7 @@ public void getSegmentFiles( }; final SegmentReplicationTarget target = new SegmentReplicationTarget( replicaShard, + primaryShard.getLatestReplicationCheckpoint(), source, new SegmentReplicationTargetService.SegmentReplicationListener() { @Override @@ -233,7 +239,7 @@ public void onReplicationFailure(SegmentReplicationState state, ReplicationFaile public void testAlreadyOnNewCheckpoint() { SegmentReplicationTargetService spy = spy(sut); spy.onNewCheckpoint(replicaShard.getLatestReplicationCheckpoint(), replicaShard); - verify(spy, times(0)).startReplication(any(), any()); + verify(spy, times(0)).startReplication(any(), any(), any()); } public void testShardAlreadyReplicating() { @@ -271,24 +277,22 @@ public void getSegmentFiles( } }; final SegmentReplicationTarget target = spy( - new SegmentReplicationTarget(replicaShard, source, mock(SegmentReplicationTargetService.SegmentReplicationListener.class)) + new SegmentReplicationTarget( + replicaShard, + primaryShard.getLatestReplicationCheckpoint(), + source, + mock(SegmentReplicationTargetService.SegmentReplicationListener.class) + ) ); + + final SegmentReplicationTargetService spy = spy(sut); + doReturn(false).when(spy).processLatestReceivedCheckpoint(eq(replicaShard), any()); // Start first round of segment replication. - sut.startReplication(target); + spy.startReplication(target); // Start second round of segment replication, this should fail to start as first round is still in-progress - sut.startReplication(replicaShard, new SegmentReplicationTargetService.SegmentReplicationListener() { - @Override - public void onReplicationDone(SegmentReplicationState state) { - Assert.fail("Should not succeed"); - } - - @Override - public void onReplicationFailure(SegmentReplicationState state, ReplicationFailedException e, boolean sendShardFailure) { - assertEquals("Shard " + replicaShard.shardId() + " is already replicating", e.getMessage()); - assertFalse(sendShardFailure); - } - }); + spy.onNewCheckpoint(newPrimaryCheckpoint, replicaShard); + verify(spy, times(1)).processLatestReceivedCheckpoint(eq(replicaShard), any()); blockGetCheckpointMetadata.countDown(); } @@ -337,8 +341,21 @@ public void cancel() { } }; + final ReplicationCheckpoint updatedCheckpoint = new ReplicationCheckpoint( + initialCheckpoint.getShardId(), + initialCheckpoint.getPrimaryTerm(), + initialCheckpoint.getSegmentsGen(), + initialCheckpoint.getSegmentInfosVersion() + 1, + primaryShard.getDefaultCodecName() + ); + final SegmentReplicationTarget targetSpy = spy( - new SegmentReplicationTarget(replicaShard, source, mock(SegmentReplicationTargetService.SegmentReplicationListener.class)) + new SegmentReplicationTarget( + replicaShard, + updatedCheckpoint, + source, + mock(SegmentReplicationTargetService.SegmentReplicationListener.class) + ) ); // start replication. This adds the target to on-ongoing replication collection @@ -352,20 +369,20 @@ public void cancel() { // ensure the old target is cancelled. and new iteration kicks off. 
verify(targetSpy, times(1)).cancel("Cancelling stuck target after new primary"); - verify(serviceSpy, times(1)).startReplication(eq(replicaShard), any()); + verify(serviceSpy, times(1)).startReplication(eq(replicaShard), any(), any()); } public void testNewCheckpointBehindCurrentCheckpoint() { SegmentReplicationTargetService spy = spy(sut); spy.onNewCheckpoint(checkpoint, replicaShard); - verify(spy, times(0)).startReplication(any(), any()); + verify(spy, times(0)).startReplication(any(), any(), any()); } public void testShardNotStarted() throws IOException { SegmentReplicationTargetService spy = spy(sut); IndexShard shard = newShard(false); spy.onNewCheckpoint(checkpoint, shard); - verify(spy, times(0)).startReplication(any(), any()); + verify(spy, times(0)).startReplication(any(), any(), any()); closeShards(shard); } @@ -381,7 +398,7 @@ public void testRejectCheckpointOnShardPrimaryMode() throws IOException { spy.onNewCheckpoint(aheadCheckpoint, spyShard); // Verify that checkpoint is not processed as shard is in PrimaryMode. - verify(spy, times(0)).startReplication(any(), any()); + verify(spy, times(0)).startReplication(any(), any(), any()); closeShards(primaryShard); } @@ -406,10 +423,10 @@ public void testStartReplicationListenerSuccess() throws InterruptedException { SegmentReplicationTargetService spy = spy(sut); CountDownLatch latch = new CountDownLatch(1); doAnswer(i -> { - ((SegmentReplicationTargetService.SegmentReplicationListener) i.getArgument(1)).onReplicationDone(state); + ((SegmentReplicationTargetService.SegmentReplicationListener) i.getArgument(2)).onReplicationDone(state); latch.countDown(); return null; - }).when(spy).startReplication(any(), any()); + }).when(spy).startReplication(any(), any(), any()); doNothing().when(spy).updateVisibleCheckpoint(eq(0L), any()); spy.afterIndexShardStarted(replicaShard); @@ -422,14 +439,14 @@ public void testStartReplicationListenerFailure() throws InterruptedException { SegmentReplicationTargetService spy = spy(sut); CountDownLatch latch = new CountDownLatch(1); doAnswer(i -> { - ((SegmentReplicationTargetService.SegmentReplicationListener) i.getArgument(1)).onReplicationFailure( + ((SegmentReplicationTargetService.SegmentReplicationListener) i.getArgument(2)).onReplicationFailure( state, new ReplicationFailedException(replicaShard, null), false ); latch.countDown(); return null; - }).when(spy).startReplication(any(), any()); + }).when(spy).startReplication(any(), any(), any()); doNothing().when(spy).updateVisibleCheckpoint(eq(0L), any()); spy.afterIndexShardStarted(replicaShard); @@ -570,6 +587,7 @@ public void testForceSegmentSyncHandlerWithFailure_AlreadyClosedException_swallo public void testTargetCancelledBeforeStartInvoked() { final SegmentReplicationTarget target = new SegmentReplicationTarget( replicaShard, + primaryShard.getLatestReplicationCheckpoint(), mock(SegmentReplicationSource.class), new SegmentReplicationTargetService.SegmentReplicationListener() { @Override diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java index 176954b6d6b3d..5b996fd774baf 100644 --- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetTests.java @@ -141,7 +141,7 @@ public void getSegmentFiles( SegmentReplicationTargetService.SegmentReplicationListener segRepListener = mock( 
SegmentReplicationTargetService.SegmentReplicationListener.class ); - segrepTarget = new SegmentReplicationTarget(spyIndexShard, segrepSource, segRepListener); + segrepTarget = new SegmentReplicationTarget(spyIndexShard, repCheckpoint, segrepSource, segRepListener); segrepTarget.startReplication(new ActionListener() { @Override @@ -189,7 +189,7 @@ public void getSegmentFiles( SegmentReplicationTargetService.SegmentReplicationListener segRepListener = mock( SegmentReplicationTargetService.SegmentReplicationListener.class ); - segrepTarget = new SegmentReplicationTarget(spyIndexShard, segrepSource, segRepListener); + segrepTarget = new SegmentReplicationTarget(spyIndexShard, repCheckpoint, segrepSource, segRepListener); segrepTarget.startReplication(new ActionListener() { @Override @@ -232,7 +232,7 @@ public void getSegmentFiles( SegmentReplicationTargetService.SegmentReplicationListener segRepListener = mock( SegmentReplicationTargetService.SegmentReplicationListener.class ); - segrepTarget = new SegmentReplicationTarget(spyIndexShard, segrepSource, segRepListener); + segrepTarget = new SegmentReplicationTarget(spyIndexShard, repCheckpoint, segrepSource, segRepListener); segrepTarget.startReplication(new ActionListener() { @Override @@ -275,7 +275,7 @@ public void getSegmentFiles( SegmentReplicationTargetService.SegmentReplicationListener segRepListener = mock( SegmentReplicationTargetService.SegmentReplicationListener.class ); - segrepTarget = new SegmentReplicationTarget(spyIndexShard, segrepSource, segRepListener); + segrepTarget = new SegmentReplicationTarget(spyIndexShard, repCheckpoint, segrepSource, segRepListener); doThrow(exception).when(spyIndexShard).finalizeReplication(any()); @@ -320,7 +320,7 @@ public void getSegmentFiles( SegmentReplicationTargetService.SegmentReplicationListener segRepListener = mock( SegmentReplicationTargetService.SegmentReplicationListener.class ); - segrepTarget = new SegmentReplicationTarget(spyIndexShard, segrepSource, segRepListener); + segrepTarget = new SegmentReplicationTarget(spyIndexShard, repCheckpoint, segrepSource, segRepListener); doThrow(exception).when(spyIndexShard).finalizeReplication(any()); @@ -364,7 +364,7 @@ public void getSegmentFiles( SegmentReplicationTargetService.SegmentReplicationListener segRepListener = mock( SegmentReplicationTargetService.SegmentReplicationListener.class ); - segrepTarget = new SegmentReplicationTarget(spyIndexShard, segrepSource, segRepListener); + segrepTarget = new SegmentReplicationTarget(spyIndexShard, repCheckpoint, segrepSource, segRepListener); when(spyIndexShard.getSegmentMetadataMap()).thenReturn(SI_SNAPSHOT_DIFFERENT); segrepTarget.startReplication(new ActionListener() { @Override @@ -417,7 +417,7 @@ public void getSegmentFiles( SegmentReplicationTargetService.SegmentReplicationListener.class ); - segrepTarget = new SegmentReplicationTarget(spyIndexShard, segrepSource, segRepListener); + segrepTarget = new SegmentReplicationTarget(spyIndexShard, repCheckpoint, segrepSource, segRepListener); when(spyIndexShard.getSegmentMetadataMap()).thenReturn(storeMetadataSnapshots.get(0).asMap()); segrepTarget.startReplication(new ActionListener() { @Override diff --git a/server/src/test/java/org/opensearch/recovery/ReplicationCollectionTests.java b/server/src/test/java/org/opensearch/recovery/ReplicationCollectionTests.java index 776173f73ce5c..9c38c5848e297 100644 --- a/server/src/test/java/org/opensearch/recovery/ReplicationCollectionTests.java +++ 
b/server/src/test/java/org/opensearch/recovery/ReplicationCollectionTests.java @@ -120,11 +120,13 @@ public void testStartMultipleReplicationsForSingleShard() throws Exception { shards.recoverReplica(shard); final SegmentReplicationTarget target1 = new SegmentReplicationTarget( shard, + shards.getPrimary().getLatestReplicationCheckpoint(), mock(SegmentReplicationSource.class), mock(ReplicationListener.class) ); final SegmentReplicationTarget target2 = new SegmentReplicationTarget( shard, + shards.getPrimary().getLatestReplicationCheckpoint(), mock(SegmentReplicationSource.class), mock(ReplicationListener.class) ); diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 29ecc6b376ad0..7ef460ebbfb50 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -94,6 +94,7 @@ import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.engine.EngineTestCase; import org.opensearch.index.engine.InternalEngineFactory; +import org.opensearch.index.engine.NRTReplicationEngineFactory; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SourceToParse; import org.opensearch.index.remote.RemoteRefreshSegmentPressureService; @@ -141,6 +142,7 @@ import org.opensearch.indices.replication.common.ReplicationFailedException; import org.opensearch.indices.replication.common.ReplicationListener; import org.opensearch.indices.replication.common.ReplicationState; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.repositories.IndexId; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.Repository; @@ -531,6 +533,58 @@ protected IndexShard newShard( ); } + protected IndexShard newShard(boolean primary, SegmentReplicationCheckpointPublisher checkpointPublisher) throws IOException { + final Settings settings = Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build(); + return newShard(primary, checkpointPublisher, settings); + } + + /** + * creates a new initializing shard. The shard will be put in its proper path under the + * current node id the shard is assigned to. + * @param checkpointPublisher Segment Replication Checkpoint Publisher to publish checkpoint + */ + protected IndexShard newShard(boolean primary, SegmentReplicationCheckpointPublisher checkpointPublisher, Settings settings) + throws IOException { + final ShardId shardId = new ShardId("index", "_na_", 0); + final ShardRouting shardRouting = TestShardRouting.newShardRouting( + shardId, + randomAlphaOfLength(10), + primary, + ShardRoutingState.INITIALIZING, + primary ? 
RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE + ); + final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); + ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); + + Settings indexSettings = Settings.builder() + .put(settings) + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000)) + .put(Settings.EMPTY) + .build(); + IndexMetadata metadata = IndexMetadata.builder(shardRouting.getIndexName()) + .settings(indexSettings) + .primaryTerm(0, primaryTerm) + .putMapping("{ \"properties\": {} }") + .build(); + return newShard( + shardRouting, + shardPath, + metadata, + null, + null, + new NRTReplicationEngineFactory(), + new EngineConfigFactory(new IndexSettings(metadata, metadata.getSettings())), + () -> {}, + RetentionLeaseSyncer.EMPTY, + EMPTY_EVENT_LISTENER, + checkpointPublisher, + null + ); + } + /** * creates a new initializing shard. * @param routing shard routing to use @@ -1527,10 +1581,7 @@ public void getCheckpointMetadata( ActionListener listener ) { try { - final CopyState copyState = new CopyState( - ReplicationCheckpoint.empty(primaryShard.shardId, primaryShard.getLatestReplicationCheckpoint().getCodec()), - primaryShard - ); + final CopyState copyState = new CopyState(primaryShard.getLatestReplicationCheckpoint(), primaryShard); listener.onResponse( new CheckpointInfoResponse(copyState.getCheckpoint(), copyState.getMetadataMap(), copyState.getInfosBytes()) ); @@ -1585,6 +1636,7 @@ protected final List replicateSegments(IndexShard prim final SegmentReplicationTargetService targetService = prepareForReplication(primaryShard, replica); final SegmentReplicationTarget target = targetService.startReplication( replica, + primaryShard.getLatestReplicationCheckpoint(), getTargetListener(primaryShard, replica, primaryMetadata, countDownLatch) ); ids.add(target); From 56a19eaef346034d8b49fe4cac83111ef093b674 Mon Sep 17 00:00:00 2001 From: Sorabh Date: Thu, 3 Aug 2023 11:00:02 -0700 Subject: [PATCH 54/75] Change to determine if concurrent segment search should be used by the request during SearchContext creation (#9059) * Change to determine if concurrent segment search should be used by the request during SearchContext creation. It caches the e evaluated output for all future invocation. 
It also provide executor to IndexSearcher based on this evaluation Signed-off-by: Sorabh Hamirwasia * Add test case to enable/disable concurrent search cluster setting and verify context object state Signed-off-by: Sorabh Hamirwasia --------- Signed-off-by: Sorabh Hamirwasia --- .../search/DefaultSearchContext.java | 37 ++++++---- .../internal/FilteredSearchContext.java | 5 ++ .../opensearch/search/SearchServiceTests.java | 69 +++++++++++++++++++ 3 files changed, 98 insertions(+), 13 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index f377a5e315e1b..d10173184f1c6 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -183,6 +183,7 @@ final class DefaultSearchContext extends SearchContext { private final QueryShardContext queryShardContext; private final FetchPhase fetchPhase; private final Function requestToAggReduceContextBuilder; + private final boolean useConcurrentSearch; DefaultSearchContext( ReaderContext readerContext, @@ -213,13 +214,14 @@ final class DefaultSearchContext extends SearchContext { this.indexShard = readerContext.indexShard(); this.clusterService = clusterService; this.engineSearcher = readerContext.acquireSearcher("search"); + this.useConcurrentSearch = useConcurrentSearch(executor); this.searcher = new ContextIndexSearcher( engineSearcher.getIndexReader(), engineSearcher.getSimilarity(), engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy(), lowLevelCancellation, - executor, + useConcurrentSearch ? executor : null, this ); this.relativeTimeSupplier = relativeTimeSupplier; @@ -878,18 +880,7 @@ public Profilers getProfilers() { */ @Override public boolean isConcurrentSegmentSearchEnabled() { - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH) - && (clusterService != null) - && (searcher().getExecutor() != null)) { - return indexService.getIndexSettings() - .getSettings() - .getAsBoolean( - IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), - clusterService.getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) - ); - } else { - return false; - } + return useConcurrentSearch; } public void setProfilers(Profilers profilers) { @@ -932,4 +923,24 @@ public void setBucketCollectorProcessor(BucketCollectorProcessor bucketCollector public BucketCollectorProcessor bucketCollectorProcessor() { return bucketCollectorProcessor; } + + /** + * Evaluate based on cluster and index settings if concurrent segment search should be used for this request context + * @return true: use concurrent search + * false: otherwise + */ + private boolean useConcurrentSearch(Executor concurrentSearchExecutor) { + if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH) + && (clusterService != null) + && (concurrentSearchExecutor != null)) { + return indexService.getIndexSettings() + .getSettings() + .getAsBoolean( + IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), + clusterService.getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); + } else { + return false; + } + } } diff --git a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java index bb990e69e7722..02e6568369e16 100644 --- a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java +++ 
b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java @@ -559,4 +559,9 @@ public void setBucketCollectorProcessor(BucketCollectorProcessor bucketCollector public BucketCollectorProcessor bucketCollectorProcessor() { return in.bucketCollectorProcessor(); } + + @Override + public boolean isConcurrentSegmentSearchEnabled() { + return in.isConcurrentSegmentSearchEnabled(); + } } diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 876a2d15cad7e..8643941970fe0 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -1275,8 +1275,77 @@ public void testConcurrentSegmentSearchSearchContext() throws IOException { .getSetting(index, IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()) ); assertEquals(concurrentSearchEnabled, searchContext.isConcurrentSegmentSearchEnabled()); + // verify executor nullability with concurrent search enabled/disabled + if (concurrentSearchEnabled) { + assertNotNull(searchContext.searcher().getExecutor()); + } else { + assertNull(searchContext.searcher().getExecutor()); + } + } + } + // Cleanup + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().putNull(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey())) + .get(); + } + + /** + * Test that the Search Context for concurrent segment search enabled is set correctly at the time of construction. + * The same is used throughout the context object lifetime even if cluster setting changes before the request completion. + */ + public void testConcurrentSegmentSearchIsSetOnceDuringContextCreation() throws IOException { + String index = randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT); + IndexService indexService = createIndex(index); + final SearchService service = getInstanceFromNode(SearchService.class); + ShardId shardId = new ShardId(indexService.index(), 0); + long nowInMillis = System.currentTimeMillis(); + String clusterAlias = randomBoolean() ? 
null : randomAlphaOfLengthBetween(3, 10); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.allowPartialSearchResults(randomBoolean()); + ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + searchRequest, + shardId, + indexService.numberOfShards(), + AliasFilter.EMPTY, + 1f, + nowInMillis, + clusterAlias, + Strings.EMPTY_ARRAY + ); + + Boolean[] concurrentSearchStates = new Boolean[] { true, false }; + for (Boolean concurrentSearchSetting : concurrentSearchStates) { + // update concurrent search cluster setting and create search context + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder().put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), concurrentSearchSetting) + ) + .get(); + try (DefaultSearchContext searchContext = service.createSearchContext(request, new TimeValue(System.currentTimeMillis()))) { + // verify concurrent search state in context + assertEquals(concurrentSearchSetting, searchContext.isConcurrentSegmentSearchEnabled()); + // verify executor state in searcher + assertEquals(concurrentSearchSetting, (searchContext.searcher().getExecutor() != null)); + + // update cluster setting to flip the concurrent segment search state + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder().put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), !concurrentSearchSetting) + ) + .get(); + + // verify that concurrent segment search is still set to same expected value for the context + assertEquals(concurrentSearchSetting, searchContext.isConcurrentSegmentSearchEnabled()); } } + // Cleanup client().admin() .cluster() From 6e51c475c30ca1a0f0875f9e8574242e4bbd6c9d Mon Sep 17 00:00:00 2001 From: Andrew Ross Date: Thu, 3 Aug 2023 13:07:09 -0700 Subject: [PATCH 55/75] Remove empty if/try-finally in base integ test class (#9098) The [commit to comment out `forceFailure()`][1] was done about 8 years ago so I don't think we'll be bringing it back. 
Also this makes the code base a slightly friendlier place :) [1]: https://github.com/opensearch-project/OpenSearch/commit/e5a699fa05a6e4ffcb919e8a2a6da8f57e1c2ce8 Signed-off-by: Andrew Ross --- .../test/OpenSearchIntegTestCase.java | 52 ++++++++----------- 1 file changed, 21 insertions(+), 31 deletions(-) diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 45d69703456fc..422f6d8dfbe7d 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -590,44 +590,34 @@ private static void clearClusters() throws Exception { } private void afterInternal(boolean afterClass) throws Exception { - boolean success = false; + final Scope currentClusterScope = getCurrentClusterScope(); + if (isInternalCluster()) { + internalCluster().clearDisruptionScheme(); + } try { - final Scope currentClusterScope = getCurrentClusterScope(); - if (isInternalCluster()) { - internalCluster().clearDisruptionScheme(); - } - try { - if (cluster() != null) { - if (currentClusterScope != Scope.TEST) { - Metadata metadata = client().admin().cluster().prepareState().execute().actionGet().getState().getMetadata(); + if (cluster() != null) { + if (currentClusterScope != Scope.TEST) { + Metadata metadata = client().admin().cluster().prepareState().execute().actionGet().getState().getMetadata(); - final Set persistentKeys = new HashSet<>(metadata.persistentSettings().keySet()); - assertThat("test leaves persistent cluster metadata behind", persistentKeys, empty()); + final Set persistentKeys = new HashSet<>(metadata.persistentSettings().keySet()); + assertThat("test leaves persistent cluster metadata behind", persistentKeys, empty()); - final Set transientKeys = new HashSet<>(metadata.transientSettings().keySet()); - assertThat("test leaves transient cluster metadata behind", transientKeys, empty()); - } - ensureClusterSizeConsistency(); - ensureClusterStateConsistency(); - ensureClusterStateCanBeReadByNodeTool(); - beforeIndexDeletion(); - cluster().wipe(excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete - if (afterClass || currentClusterScope == Scope.TEST) { - cluster().close(); - } - cluster().assertAfterTest(); + final Set transientKeys = new HashSet<>(metadata.transientSettings().keySet()); + assertThat("test leaves transient cluster metadata behind", transientKeys, empty()); } - } finally { - if (currentClusterScope == Scope.TEST) { - clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + ensureClusterStateCanBeReadByNodeTool(); + beforeIndexDeletion(); + cluster().wipe(excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete + if (afterClass || currentClusterScope == Scope.TEST) { + cluster().close(); } + cluster().assertAfterTest(); } - success = true; } finally { - if (!success) { - // if we failed here that means that something broke horribly so we should clear all clusters - // TODO: just let the exception happen, WTF is all this horseshit - // afterTestRule.forceFailure(); + if (currentClusterScope == Scope.TEST) { + clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST } } } From 9357b6192e3c107844b4cbe9bacd3502a472e58d Mon Sep 17 00:00:00 
2001 From: Suraj Singh Date: Thu, 3 Aug 2023 14:42:15 -0700 Subject: [PATCH 56/75] [Remote Store] Add missing unit test in main (#9101) Signed-off-by: Suraj Singh --- .../RemoteSegmentStoreDirectoryTests.java | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java index 3b2e33388925a..f2d39b2ac7bee 100644 --- a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java +++ b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java @@ -758,6 +758,35 @@ public void testUploadMetadataNonEmpty() throws IOException { } } + public void testUploadMetadataMissingSegment() throws IOException { + populateMetadata(); + remoteSegmentStoreDirectory.init(); + + Directory storeDirectory = mock(Directory.class); + IndexOutput indexOutput = mock(IndexOutput.class); + + String generation = RemoteStoreUtils.invertLong(segmentInfos.getGeneration()); + long primaryTermLong = indexShard.getLatestReplicationCheckpoint().getPrimaryTerm(); + String primaryTerm = RemoteStoreUtils.invertLong(primaryTermLong); + when(storeDirectory.createOutput(startsWith("metadata__" + primaryTerm + "__" + generation), eq(IOContext.DEFAULT))).thenReturn( + indexOutput + ); + + Collection segmentFiles = List.of("_123.si"); + assertThrows( + NoSuchFileException.class, + () -> remoteSegmentStoreDirectory.uploadMetadata( + segmentFiles, + segmentInfos, + storeDirectory, + 12L, + indexShard.getLatestReplicationCheckpoint() + ) + ); + verify(indexOutput).close(); + verify(storeDirectory).deleteFile(startsWith("metadata__" + primaryTerm + "__" + generation)); + } + public void testUploadMetadataNoSegmentCommitInfos() throws IOException { SegmentInfos segInfos = indexShard.store().readLastCommittedSegmentsInfo(); int numSegCommitInfos = segInfos.size(); From 24595c9fbace3516e886137058f3a8b992937652 Mon Sep 17 00:00:00 2001 From: Neetika Singhal Date: Thu, 3 Aug 2023 17:31:08 -0700 Subject: [PATCH 57/75] Make MultiBucketConsumerService thread safe to use across slices during search (#9047) Signed-off-by: Neetika Singhal --- CHANGELOG.md | 2 +- .../MultiBucketConsumerService.java | 54 ++++++++++++-- .../MultiBucketConsumerTests.java | 72 +++++++++++++++++++ 3 files changed, 121 insertions(+), 7 deletions(-) create mode 100644 server/src/test/java/org/opensearch/search/aggregations/MultiBucketConsumerTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index e29bbd2da4db5..3b81cd3a60deb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -110,7 +110,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Create separate SourceLookup instance per segment slice in SignificantTextAggregatorFactory ([#8807](https://github.com/opensearch-project/OpenSearch/pull/8807)) - Add support for aggregation profiler with concurrent aggregation ([#8801](https://github.com/opensearch-project/OpenSearch/pull/8801)) - [Remove] Deprecated Fractional ByteSizeValue support #9005 ([#9005](https://github.com/opensearch-project/OpenSearch/pull/9005)) - +- Make MultiBucketConsumerService thread safe to use across slices during search ([#9047](https://github.com/opensearch-project/OpenSearch/pull/9047)) ### Deprecated ### Removed diff --git a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java 
b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java index f1416fddebfa2..825e37b7cd952 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java +++ b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketConsumerService.java @@ -33,6 +33,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Setting; @@ -42,6 +43,7 @@ import org.opensearch.search.aggregations.bucket.BucketsAggregator; import java.io.IOException; +import java.util.concurrent.atomic.LongAdder; import java.util.function.IntConsumer; /** @@ -127,13 +129,36 @@ public static class MultiBucketConsumer implements IntConsumer { private final int limit; private final CircuitBreaker breaker; - // aggregations execute in a single thread so no atomic here + // aggregations execute in a single thread for both sequential + // and concurrent search, so no atomic here private int count; - private int callCount = 0; + + // will be updated by multiple threads in concurrent search + // hence making it as LongAdder + private final LongAdder callCount; + private volatile boolean circuitBreakerTripped; + private final int availProcessors; public MultiBucketConsumer(int limit, CircuitBreaker breaker) { this.limit = limit; this.breaker = breaker; + callCount = new LongAdder(); + availProcessors = Runtime.getRuntime().availableProcessors(); + } + + // only visible for testing + protected MultiBucketConsumer( + int limit, + CircuitBreaker breaker, + LongAdder callCount, + boolean circuitBreakerTripped, + int availProcessors + ) { + this.limit = limit; + this.breaker = breaker; + this.callCount = callCount; + this.circuitBreakerTripped = circuitBreakerTripped; + this.availProcessors = availProcessors; } @Override @@ -153,10 +178,27 @@ public void accept(int value) { ); } } - // check parent circuit breaker every 1024 calls - callCount++; - if ((callCount & 0x3FF) == 0) { - breaker.addEstimateBytesAndMaybeBreak(0, "allocated_buckets"); + callCount.increment(); + // tripping the circuit breaker for other threads in case of concurrent search + // if the circuit breaker has tripped for one of the threads already, more info + // can be found on: https://github.com/opensearch-project/OpenSearch/issues/7785 + if (circuitBreakerTripped) { + throw new CircuitBreakingException( + "Circuit breaker for this consumer has already been tripped by previous invocations. 
" + + "This can happen in case of concurrent segment search when multiple threads are " + + "executing the request and one of the thread has already tripped the circuit breaker", + breaker.getDurability() + ); + } + // check parent circuit breaker every 1024 to (1024 + available processors) calls + long sum = callCount.sum(); + if ((sum >= 1024) && (sum & 0x3FF) <= availProcessors) { + try { + breaker.addEstimateBytesAndMaybeBreak(0, "allocated_buckets"); + } catch (CircuitBreakingException e) { + circuitBreakerTripped = true; + throw e; + } } } diff --git a/server/src/test/java/org/opensearch/search/aggregations/MultiBucketConsumerTests.java b/server/src/test/java/org/opensearch/search/aggregations/MultiBucketConsumerTests.java new file mode 100644 index 0000000000000..eda705f95bf9b --- /dev/null +++ b/server/src/test/java/org/opensearch/search/aggregations/MultiBucketConsumerTests.java @@ -0,0 +1,72 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations; + +import org.mockito.Mockito; +import org.opensearch.core.common.breaker.CircuitBreaker; +import org.opensearch.core.common.breaker.CircuitBreakingException; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.concurrent.atomic.LongAdder; + +import static org.opensearch.search.aggregations.MultiBucketConsumerService.DEFAULT_MAX_BUCKETS; + +public class MultiBucketConsumerTests extends OpenSearchTestCase { + + public void testMultiConsumerAcceptWhenCBTripped() { + CircuitBreaker breaker = Mockito.mock(CircuitBreaker.class); + MultiBucketConsumerService.MultiBucketConsumer multiBucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + breaker, + new LongAdder(), + true, + 1 + ); + // exception is thrown upfront since the circuit breaker has already tripped + expectThrows(CircuitBreakingException.class, () -> multiBucketConsumer.accept(0)); + Mockito.verify(breaker, Mockito.times(0)).addEstimateBytesAndMaybeBreak(0, "allocated_buckets"); + } + + public void testMultiConsumerAcceptToTripCB() { + CircuitBreaker breaker = Mockito.mock(CircuitBreaker.class); + LongAdder callCount = new LongAdder(); + for (int i = 0; i < 1024; i++) { + callCount.increment(); + } + MultiBucketConsumerService.MultiBucketConsumer multiBucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + breaker, + callCount, + false, + 2 + ); + // circuit breaker check is performed as the value of call count would be 1025 which is still in range + Mockito.when(breaker.addEstimateBytesAndMaybeBreak(0, "allocated_buckets")).thenThrow(CircuitBreakingException.class); + expectThrows(CircuitBreakingException.class, () -> multiBucketConsumer.accept(0)); + Mockito.verify(breaker, Mockito.times(1)).addEstimateBytesAndMaybeBreak(0, "allocated_buckets"); + } + + public void testMultiConsumerAccept() { + CircuitBreaker breaker = Mockito.mock(CircuitBreaker.class); + LongAdder callCount = new LongAdder(); + for (int i = 0; i < 1100; i++) { + callCount.increment(); + } + MultiBucketConsumerService.MultiBucketConsumer multiBucketConsumer = new MultiBucketConsumerService.MultiBucketConsumer( + DEFAULT_MAX_BUCKETS, + breaker, + callCount, + false, + 1 + ); + // no exception is thrown as the call count value is not in the expected range and CB is not checked + multiBucketConsumer.accept(0); + 
Mockito.verify(breaker, Mockito.times(0)).addEstimateBytesAndMaybeBreak(0, "allocated_buckets"); + } +} From 5bb7fa3ead94051f9fe3e8772f8f093d422a356d Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Fri, 4 Aug 2023 14:55:34 -0500 Subject: [PATCH 58/75] [Refactor] CollectionUtils, Iterators, and String methods to core lib (#9120) This commit refactors the CollectionUtils, and Iterators utility classes to the core and common libraries, respectively. Several other base Strings utility methods are also refactored to the core Strings utility library. This is done to incrementally move the remaining Strings methods in server to the core String utility. Signed-off-by: Nicholas Walter Knize --- .../opensearch/client/RequestConverters.java | 2 +- .../client/ClusterRequestConvertersTests.java | 2 +- .../client/IndicesRequestConvertersTests.java | 2 +- .../opensearch/common/collect/Iterators.java | 0 .../common/collect/IteratorsTests.java | 2 + .../org/opensearch/core/common/Strings.java | 225 +++++++++++++----- .../core}/common/util/CollectionUtils.java | 6 +- .../opensearch/core/common/StringsTests.java | 38 +++ .../geo/algorithm/PolygonGenerator.java | 2 +- .../ingest/common/ScriptProcessor.java | 2 +- .../CustomReflectionObjectHandler.java | 2 +- .../painless/lookup/PainlessClass.java | 2 +- .../painless/lookup/PainlessLookup.java | 2 +- .../AbstractAzureComputeServiceTestCase.java | 2 +- .../RemoveCorruptedShardDataCommandIT.java | 2 +- .../index/store/CorruptedFileIT.java | 2 +- .../recovery/RecoveryWhileUnderLoadIT.java | 2 +- .../bucket/terms/BaseStringTermsTestCase.java | 2 +- .../metrics/HDRPercentileRanksIT.java | 2 +- .../metrics/HDRPercentilesIT.java | 2 +- .../metrics/TDigestPercentileRanksIT.java | 2 +- .../metrics/TDigestPercentilesIT.java | 2 +- .../suggest/CompletionSuggestSearchIT.java | 2 +- .../snapshots/SearchableSnapshotIT.java | 2 +- .../search/grouping/CollapseTopFieldDocs.java | 2 +- .../health/TransportClusterHealthAction.java | 2 +- .../TransportSnapshotsStatusAction.java | 2 +- .../indices/close/CloseIndexRequest.java | 2 +- .../indices/close/CloseIndexResponse.java | 2 +- .../datastream/DeleteDataStreamAction.java | 2 +- .../indices/delete/DeleteIndexRequest.java | 2 +- .../mapping/put/PutMappingRequest.java | 6 +- .../admin/indices/open/OpenIndexRequest.java | 2 +- .../readonly/AddIndexBlockRequest.java | 2 +- .../readonly/AddIndexBlockResponse.java | 2 +- .../get/TransportGetSettingsAction.java | 2 +- .../search/SearchPhaseExecutionException.java | 2 +- .../support/tasks/BaseTasksRequest.java | 2 +- .../metadata/IndexNameExpressionResolver.java | 2 +- .../routing/RotationShardShuffler.java | 2 +- .../java/org/opensearch/common/Strings.java | 113 --------- .../opensearch/common/logging/Loggers.java | 2 +- .../org/opensearch/common/regex/Regex.java | 4 +- .../identity/tokens/RestTokenExtractor.java | 2 +- .../org/opensearch/index/IndexingSlowLog.java | 2 +- .../index/analysis/CustomAnalyzer.java | 2 +- .../analysis/ReloadableCustomAnalyzer.java | 2 +- .../index/engine/CompletionStatsCache.java | 2 +- .../index/fielddata/ShardFieldData.java | 2 +- .../index/mapper/BinaryFieldMapper.java | 2 +- .../index/mapper/IndexFieldMapper.java | 2 +- .../index/mapper/SourceFieldMapper.java | 2 +- .../index/query/GeoValidationMethod.java | 2 +- .../org/opensearch/index/query/Operator.java | 2 +- .../index/query/SimpleQueryStringBuilder.java | 2 +- .../reindex/ClientScrollableHitSource.java | 2 +- .../index/search/stats/ShardSearchStats.java | 2 +- 
.../opensearch/indices/IndicesService.java | 2 +- .../java/org/opensearch/rest/RestRequest.java | 13 +- .../opensearch/rest/action/RestActions.java | 2 +- .../rest/action/cat/RestNodeAttrsAction.java | 2 +- .../rest/action/cat/RestNodesAction.java | 2 +- .../rest/action/cat/RestTasksAction.java | 2 +- .../org/opensearch/search/SearchService.java | 2 +- .../metrics/InternalScriptedMetric.java | 2 +- .../metrics/ScriptedMetricAggregator.java | 2 +- .../aggregations/support/ValuesSource.java | 2 +- .../support/values/ScriptBytesValues.java | 2 +- .../fetch/subphase/ScriptFieldsPhase.java | 2 +- .../highlight/FastVectorHighlighter.java | 2 +- .../java/org/opensearch/tasks/TaskId.java | 2 +- .../org/opensearch/watcher/FileWatcher.java | 2 +- ...dateSettingsRequestSerializationTests.java | 2 +- .../allocation/AddIncrementallyTests.java | 2 +- .../org/opensearch/common/StringsTests.java | 45 +--- .../common/collect/EvictingQueueTests.java | 2 +- .../common/util/CollectionUtilsTests.java | 3 +- .../common/xcontent/BaseXContentTestCase.java | 2 +- .../query/MoreLikeThisQueryBuilderTests.java | 2 +- ...archAllocationWithConstraintsTestCase.java | 2 +- .../coordination/LinearizabilityChecker.java | 2 +- .../test/OpenSearchIntegTestCase.java | 2 +- .../opensearch/test/OpenSearchTestCase.java | 2 +- 83 files changed, 302 insertions(+), 297 deletions(-) rename {server => libs/common}/src/main/java/org/opensearch/common/collect/Iterators.java (100%) rename {server => libs/common}/src/test/java/org/opensearch/common/collect/IteratorsTests.java (98%) rename {server/src/main/java/org/opensearch => libs/core/src/main/java/org/opensearch/core}/common/util/CollectionUtils.java (98%) diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index d23a5976fada6..c48acfc5b6ece 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -76,7 +76,7 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java index f201599632969..5ac3969a6e887 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java @@ -40,7 +40,7 @@ import org.opensearch.client.cluster.RemoteInfoRequest; import org.opensearch.cluster.health.ClusterHealthStatus; import org.opensearch.common.Priority; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.test.OpenSearchTestCase; import org.apache.hc.client5.http.classic.methods.HttpGet; import org.apache.hc.client5.http.classic.methods.HttpPut; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java 
b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java index c672ed6be110d..75c5a71303af4 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java @@ -74,7 +74,7 @@ import org.opensearch.common.CheckedFunction; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.Strings; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/main/java/org/opensearch/common/collect/Iterators.java b/libs/common/src/main/java/org/opensearch/common/collect/Iterators.java similarity index 100% rename from server/src/main/java/org/opensearch/common/collect/Iterators.java rename to libs/common/src/main/java/org/opensearch/common/collect/Iterators.java diff --git a/server/src/test/java/org/opensearch/common/collect/IteratorsTests.java b/libs/common/src/test/java/org/opensearch/common/collect/IteratorsTests.java similarity index 98% rename from server/src/test/java/org/opensearch/common/collect/IteratorsTests.java rename to libs/common/src/test/java/org/opensearch/common/collect/IteratorsTests.java index 6ad272542dbb1..2578926454c0b 100644 --- a/server/src/test/java/org/opensearch/common/collect/IteratorsTests.java +++ b/libs/common/src/test/java/org/opensearch/common/collect/IteratorsTests.java @@ -83,6 +83,7 @@ public void testRandomSingleton() { int numberOfIterators = randomIntBetween(1, 1000); int singletonIndex = randomIntBetween(0, numberOfIterators - 1); int value = randomInt(); + @SuppressWarnings("rawtypes") Iterator[] iterators = new Iterator[numberOfIterators]; for (int i = 0; i < numberOfIterators; i++) { iterators[i] = i != singletonIndex ? empty() : singletonIterator(value); @@ -92,6 +93,7 @@ public void testRandomSingleton() { public void testRandomIterators() { int numberOfIterators = randomIntBetween(1, 1000); + @SuppressWarnings("rawtypes") Iterator[] iterators = new Iterator[numberOfIterators]; List values = new ArrayList<>(); for (int i = 0; i < numberOfIterators; i++) { diff --git a/libs/core/src/main/java/org/opensearch/core/common/Strings.java b/libs/core/src/main/java/org/opensearch/core/common/Strings.java index ffb7711f84492..2e610f6047fd4 100644 --- a/libs/core/src/main/java/org/opensearch/core/common/Strings.java +++ b/libs/core/src/main/java/org/opensearch/core/common/Strings.java @@ -9,6 +9,7 @@ package org.opensearch.core.common; import org.opensearch.common.Nullable; +import org.opensearch.core.common.util.CollectionUtils; import java.io.BufferedReader; import java.io.StringReader; @@ -35,18 +36,6 @@ public class Strings { public static final String UNKNOWN_UUID_VALUE = "_na_"; public static final String[] EMPTY_ARRAY = new String[0]; - /** - * Split the specified string by commas to an array. - * - * @param s the string to split - * @return the array of split values - * @see String#split(String) - */ - public static String[] splitStringByCommaToArray(final String s) { - if (s == null || s.isEmpty()) return Strings.EMPTY_ARRAY; - else return s.split(","); - } - /** * Convenience method to return a Collection as a delimited (e.g. CSV) * String. E.g. useful for toString() implementations. 
@@ -341,47 +330,6 @@ public static String[] toStringArray(final Collection collection) { return collection.toArray(new String[0]); } - /** - * Take a String which is a delimited list and convert it to a String array. - *
A single delimiter can consists of more than one character: It will still - * be considered as single delimiter string, rather than as bunch of potential - * delimiter characters - in contrast to tokenizeToStringArray. - * - * @param str the input String - * @param delimiter the delimiter between elements (this is a single delimiter, - * rather than a bunch individual delimiter characters) - * @param charsToDelete a set of characters to delete. Useful for deleting unwanted - * line breaks: e.g. "\r\n\f" will delete all new lines and line feeds in a String. - * @return an array of the tokens in the list - * @see #tokenizeToStringArray - */ - public static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) { - if (str == null) { - return Strings.EMPTY_ARRAY; - } - if (delimiter == null) { - return new String[] { str }; - } - List result = new ArrayList<>(); - if ("".equals(delimiter)) { - for (int i = 0; i < str.length(); i++) { - result.add(deleteAny(str.substring(i, i + 1), charsToDelete)); - } - } else { - int pos = 0; - int delPos; - while ((delPos = str.indexOf(delimiter, pos)) != -1) { - result.add(deleteAny(str.substring(pos, delPos), charsToDelete)); - pos = delPos + delimiter.length(); - } - if (str.length() > 0 && pos <= str.length()) { - // Add rest of String, but not in case of empty input. - result.add(deleteAny(str.substring(pos), charsToDelete)); - } - } - return toStringArray(result); - } - /** * Tokenize the specified string by commas to a set, trimming whitespace and ignoring empty tokens. * @@ -393,6 +341,41 @@ public static Set tokenizeByCommaToSet(final String s) { return tokenizeToCollection(s, ",", HashSet::new); } + /** + * Split the specified string by commas to an array. + * + * @param s the string to split + * @return the array of split values + * @see String#split(String) + */ + public static String[] splitStringByCommaToArray(final String s) { + if (s == null || s.isEmpty()) return Strings.EMPTY_ARRAY; + else return s.split(","); + } + + /** + * Split a String at the first occurrence of the delimiter. + * Does not include the delimiter in the result. + * + * @param toSplit the string to split + * @param delimiter to split the string up with + * @return a two element array with index 0 being before the delimiter, and + * index 1 being after the delimiter (neither element includes the delimiter); + * or null if the delimiter wasn't found in the given input String + */ + public static String[] split(String toSplit, String delimiter) { + if (hasLength(toSplit) == false || hasLength(delimiter) == false) { + return null; + } + int offset = toSplit.indexOf(delimiter); + if (offset < 0) { + return null; + } + String beforeDelimiter = toSplit.substring(0, offset); + String afterDelimiter = toSplit.substring(offset + delimiter.length()); + return new String[] { beforeDelimiter, afterDelimiter }; + } + /** * Tokenize the given String into a String array via a StringTokenizer. * Trims tokens and omits empty tokens. @@ -446,6 +429,47 @@ private static > T tokenizeToCollection( return tokens; } + /** + * Take a String which is a delimited list and convert it to a String array. + *
A single delimiter can consists of more than one character: It will still + * be considered as single delimiter string, rather than as bunch of potential + * delimiter characters - in contrast to tokenizeToStringArray. + * + * @param str the input String + * @param delimiter the delimiter between elements (this is a single delimiter, + * rather than a bunch individual delimiter characters) + * @param charsToDelete a set of characters to delete. Useful for deleting unwanted + * line breaks: e.g. "\r\n\f" will delete all new lines and line feeds in a String. + * @return an array of the tokens in the list + * @see #tokenizeToStringArray + */ + public static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) { + if (str == null) { + return Strings.EMPTY_ARRAY; + } + if (delimiter == null) { + return new String[] { str }; + } + List result = new ArrayList<>(); + if ("".equals(delimiter)) { + for (int i = 0; i < str.length(); i++) { + result.add(deleteAny(str.substring(i, i + 1), charsToDelete)); + } + } else { + int pos = 0; + int delPos; + while ((delPos = str.indexOf(delimiter, pos)) != -1) { + result.add(deleteAny(str.substring(pos, delPos), charsToDelete)); + pos = delPos + delimiter.length(); + } + if (str.length() > 0 && pos <= str.length()) { + // Add rest of String, but not in case of empty input. + result.add(deleteAny(str.substring(pos), charsToDelete)); + } + } + return toStringArray(result); + } + /** * Take a String which is a delimited list and convert it to a String array. *
A single delimiter can consists of more than one character: It will still @@ -486,10 +510,6 @@ public static Set commaDelimitedListToSet(String str) { return set; } - public static boolean isNullOrEmpty(@Nullable String s) { - return s == null || s.isEmpty(); - } - /** * Capitalize a String, changing the first letter to * upper case as per {@link Character#toUpperCase(char)}. @@ -538,4 +558,95 @@ public static String format1Decimals(double value, String suffix) { } } } + + /** + * Return substring(beginIndex, endIndex) that is impervious to string length. + */ + public static String substring(String s, int beginIndex, int endIndex) { + if (s == null) { + return s; + } + + int realEndIndex = s.length() > 0 ? s.length() - 1 : 0; + + if (endIndex > realEndIndex) { + return s.substring(beginIndex); + } else { + return s.substring(beginIndex, endIndex); + } + } + + /** + * If an array only consists of zero or one element, which is "*" or "_all" return an empty array + * which is usually used as everything + */ + public static boolean isAllOrWildcard(String[] data) { + return CollectionUtils.isEmpty(data) || data.length == 1 && isAllOrWildcard(data[0]); + } + + /** + * Returns `true` if the string is `_all` or `*`. + */ + public static boolean isAllOrWildcard(String data) { + return "_all".equals(data) || "*".equals(data); + } + + /** + * Truncates string to a length less than length. Backtracks to throw out + * high surrogates. + */ + public static String cleanTruncate(String s, int length) { + if (s == null) { + return s; + } + /* + * Its pretty silly for you to truncate to 0 length but just in case + * someone does this shouldn't break. + */ + if (length == 0) { + return ""; + } + if (length >= s.length()) { + return s; + } + if (Character.isHighSurrogate(s.charAt(length - 1))) { + length--; + } + return s.substring(0, length); + } + + public static boolean isNullOrEmpty(@Nullable String s) { + return s == null || s.isEmpty(); + } + + public static String padStart(String s, int minimumLength, char c) { + if (s == null) { + throw new NullPointerException("s"); + } + if (s.length() >= minimumLength) { + return s; + } else { + StringBuilder sb = new StringBuilder(minimumLength); + for (int i = s.length(); i < minimumLength; i++) { + sb.append(c); + } + + sb.append(s); + return sb.toString(); + } + } + + public static String toLowercaseAscii(String in) { + StringBuilder out = new StringBuilder(); + Iterator iter = in.codePoints().iterator(); + while (iter.hasNext()) { + int codepoint = iter.next(); + if (codepoint > 128) { + out.appendCodePoint(codepoint); + } else { + out.appendCodePoint(Character.toLowerCase(codepoint)); + } + } + return out.toString(); + } } diff --git a/server/src/main/java/org/opensearch/common/util/CollectionUtils.java b/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java similarity index 98% rename from server/src/main/java/org/opensearch/common/util/CollectionUtils.java rename to libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java index 2037b2e46488f..5cb8b900814b9 100644 --- a/server/src/main/java/org/opensearch/common/util/CollectionUtils.java +++ b/libs/core/src/main/java/org/opensearch/core/common/util/CollectionUtils.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.opensearch.common.util; +package org.opensearch.core.common.util; import org.opensearch.common.collect.Iterators; import org.opensearch.core.common.Strings; @@ -217,6 +217,7 @@ public int size() { } } + @SuppressWarnings("unchecked") public static ArrayList iterableAsArrayList(Iterable elements) { if (elements == null) { throw new NullPointerException("elements"); @@ -232,6 +233,7 @@ public static ArrayList iterableAsArrayList(Iterable element } } + @SuppressWarnings("unchecked") public static ArrayList arrayAsArrayList(E... elements) { if (elements == null) { throw new NullPointerException("elements"); @@ -239,6 +241,7 @@ public static ArrayList arrayAsArrayList(E... elements) { return new ArrayList<>(Arrays.asList(elements)); } + @SuppressWarnings("unchecked") public static ArrayList asArrayList(E first, E... other) { if (other == null) { throw new NullPointerException("other"); @@ -249,6 +252,7 @@ public static ArrayList asArrayList(E first, E... other) { return list; } + @SuppressWarnings("unchecked") public static ArrayList asArrayList(E first, E second, E... other) { if (other == null) { throw new NullPointerException("other"); diff --git a/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java b/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java index 532251e02e685..ca0bd788c1991 100644 --- a/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java +++ b/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java @@ -11,8 +11,46 @@ import org.opensearch.common.util.set.Sets; import org.opensearch.test.OpenSearchTestCase; +import static org.hamcrest.Matchers.is; + /** tests for Strings utility class */ public class StringsTests extends OpenSearchTestCase { + public void testIsAllOrWildCardString() { + assertThat(Strings.isAllOrWildcard("_all"), is(true)); + assertThat(Strings.isAllOrWildcard("*"), is(true)); + assertThat(Strings.isAllOrWildcard("foo"), is(false)); + assertThat(Strings.isAllOrWildcard(""), is(false)); + assertThat(Strings.isAllOrWildcard((String) null), is(false)); + } + + public void testSubstring() { + assertEquals(null, Strings.substring(null, 0, 1000)); + assertEquals("foo", Strings.substring("foo", 0, 1000)); + assertEquals("foo", Strings.substring("foo", 0, 3)); + assertEquals("oo", Strings.substring("foo", 1, 3)); + assertEquals("oo", Strings.substring("foo", 1, 100)); + assertEquals("f", Strings.substring("foo", 0, 1)); + } + + public void testCleanTruncate() { + assertEquals(null, Strings.cleanTruncate(null, 10)); + assertEquals("foo", Strings.cleanTruncate("foo", 10)); + assertEquals("foo", Strings.cleanTruncate("foo", 3)); + // Throws out high surrogates + assertEquals("foo", Strings.cleanTruncate("foo\uD83D\uDEAB", 4)); + // But will keep the whole character + assertEquals("foo\uD83D\uDEAB", Strings.cleanTruncate("foo\uD83D\uDEAB", 5)); + /* + * Doesn't take care around combining marks. This example has its + * meaning changed because that last codepoint is supposed to combine + * backwards into the find "o" and be represented as the "o" with a + * circle around it with a slash through it. As in "no 'o's allowed + * here. 
+ */ + assertEquals("o", org.opensearch.core.common.Strings.cleanTruncate("o\uD83D\uDEAB", 1)); + assertEquals("", org.opensearch.core.common.Strings.cleanTruncate("foo", 0)); + } + public void testSplitStringToSet() { assertEquals(Strings.tokenizeByCommaToSet(null), Sets.newHashSet()); assertEquals(Strings.tokenizeByCommaToSet(""), Sets.newHashSet()); diff --git a/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java index 246ece4342cff..da1d97260ec96 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java +++ b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.awt.geom.Point2D; import java.util.ArrayList; diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java index 5f61091495cd5..b66d0b709a824 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java @@ -34,7 +34,7 @@ import org.opensearch.common.Nullable; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java index 57451a027c5d7..0936dee0c3016 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomReflectionObjectHandler.java @@ -33,7 +33,7 @@ package org.opensearch.script.mustache; import com.github.mustachejava.reflect.ReflectionObjectHandler; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.util.iterable.Iterables; import java.lang.reflect.Array; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java index efa2d51524557..fdf7df94252b6 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessClass.java @@ -32,7 +32,7 @@ package org.opensearch.painless.lookup; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.lang.invoke.MethodHandle; import java.util.Map; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java index 1249a9cffecb2..9a3b8bf9e2eee 100644 --- 
a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookup.java @@ -32,7 +32,7 @@ package org.opensearch.painless.lookup; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.lang.invoke.MethodHandle; import java.util.Map; diff --git a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java index d5d6aae23f344..9a6327a5c88ba 100644 --- a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java +++ b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java @@ -63,7 +63,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import static org.opensearch.common.util.CollectionUtils.newSingletonArrayList; +import static org.opensearch.core.common.util.CollectionUtils.newSingletonArrayList; import static org.opensearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING; public abstract class AbstractAzureComputeServiceTestCase extends OpenSearchIntegTestCase { diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java index df2c8c62ca392..986155b99217e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -105,7 +105,7 @@ import java.util.stream.StreamSupport; import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS; -import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList; +import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java index 89031f68aba97..52ee6fd10ced7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java @@ -112,7 +112,7 @@ import java.util.stream.Collectors; import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS; -import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList; +import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAllSuccessful; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java 
b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java index a675eb7c77344..5b72eaca0b4e5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java @@ -46,7 +46,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.DocsStats; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java index 7775618ba5b13..f7195c7cb4d2c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java @@ -11,7 +11,7 @@ import org.junit.After; import org.junit.Before; import org.opensearch.action.index.IndexRequestBuilder; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; import org.opensearch.search.aggregations.AggregationTestScriptsPlugin; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 20fc6aaee20c9..6a62dabd98bce 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -53,7 +53,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; -import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList; +import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.index.query.QueryBuilders.termQuery; import static org.opensearch.search.aggregations.AggregationBuilders.filter; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java index 2660dbe0a88ed..ad3fd6517d1b1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -34,7 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index adf027222d7d9..602cfe344b496 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -34,7 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java index fda15f9b90ea2..2c05ed0bac44a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -34,7 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java index 8eb957733944d..764d40eb7bc8e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java @@ -78,7 +78,7 @@ import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; -import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList; +import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAllSuccessful; diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java index efd072fce511d..2a9f8cf67a607 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java @@ -56,7 +56,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS; -import static org.opensearch.common.util.CollectionUtils.iterableAsArrayList; +import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList; 
@ThreadLeakFilters(filters = CleanerDaemonThreadLeakFilter.class) public final class SearchableSnapshotIT extends AbstractSnapshotIntegTestCase { diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java index fe26c313d72b2..e93e5cdcc3f7b 100644 --- a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java +++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java @@ -39,7 +39,7 @@ import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.PriorityQueue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.util.ArrayList; import java.util.HashSet; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java index d84179fa6bc0a..09f6b7cd06cda 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -60,7 +60,7 @@ import org.opensearch.common.inject.Inject; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.common.Strings; import org.opensearch.discovery.ClusterManagerNotDiscoveredException; import org.opensearch.discovery.Discovery; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index 78952805e5b49..b49c18082d0dc 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -48,7 +48,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.util.set.Sets; import org.opensearch.core.common.Strings; import org.opensearch.core.index.shard.ShardId; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java index 8c6dde80d8d97..0f98550343a13 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexRequest.java @@ -39,7 +39,7 @@ import org.opensearch.action.support.master.AcknowledgedRequest; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java 
b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java index 8e58ee92db80a..a3c599a54a10c 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java @@ -39,7 +39,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java index e5ae7cd582481..5f5be3c166dbc 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/datastream/DeleteDataStreamAction.java @@ -59,7 +59,7 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.common.Strings; import org.opensearch.core.index.Index; import org.opensearch.snapshots.SnapshotInProgressException; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java index 35fb6a8748941..006da6b3cbb09 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/delete/DeleteIndexRequest.java @@ -38,7 +38,7 @@ import org.opensearch.action.support.master.AcknowledgedRequest; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java index 2588dfd718e71..373331eb1554b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -43,7 +43,7 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; @@ -251,7 +251,7 @@ public static XContentBuilder simpleMapping(String... 
source) { builder.startObject(fieldName); String[] s1 = Strings.splitStringByCommaToArray(source[i]); for (String s : s1) { - String[] s2 = org.opensearch.common.Strings.split(s, "="); + String[] s2 = Strings.split(s, "="); if (s2.length != 2) { throw new IllegalArgumentException("malformed " + s); } @@ -271,7 +271,7 @@ public static XContentBuilder simpleMapping(String... source) { builder.startObject(fieldName); String[] s1 = Strings.splitStringByCommaToArray(source[i]); for (String s : s1) { - String[] s2 = org.opensearch.common.Strings.split(s, "="); + String[] s2 = Strings.split(s, "="); if (s2.length != 2) { throw new IllegalArgumentException("malformed " + s); } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java index d42f3699765e7..16451e311e7d3 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/open/OpenIndexRequest.java @@ -39,7 +39,7 @@ import org.opensearch.action.support.master.AcknowledgedRequest; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java index ca5a17f0a1520..b5097f96fe52b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockRequest.java @@ -39,7 +39,7 @@ import org.opensearch.cluster.metadata.IndexMetadata.APIBlock; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.IOException; import java.util.Objects; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java index 39268d752c8c5..f90bdaca510ae 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java @@ -39,7 +39,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java index 22cbcf804f9d2..d3bf275823278 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/TransportGetSettingsAction.java @@ -47,7 +47,7 @@ import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsFilter; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.index.Index; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java index b3ed42824e91a..998ba720c4523 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java +++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseExecutionException.java @@ -37,7 +37,7 @@ import org.opensearch.core.action.ShardOperationFailedException; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.rest.RestStatus; diff --git a/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java b/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java index 43ece159247bc..f8d96d57b341b 100644 --- a/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java +++ b/server/src/main/java/org/opensearch/action/support/tasks/BaseTasksRequest.java @@ -38,7 +38,7 @@ import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.common.Strings; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskId; diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java index 52a76e9dd90f1..c7ef959d6a080 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java @@ -44,7 +44,7 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateMathParser; import org.opensearch.common.time.DateUtils; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.set.Sets; import org.opensearch.core.common.Strings; diff --git a/server/src/main/java/org/opensearch/cluster/routing/RotationShardShuffler.java b/server/src/main/java/org/opensearch/cluster/routing/RotationShardShuffler.java index c49ad09b85344..a15d96e9adc99 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/RotationShardShuffler.java +++ b/server/src/main/java/org/opensearch/cluster/routing/RotationShardShuffler.java @@ -32,7 +32,7 @@ package org.opensearch.cluster.routing; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.util.List; import 
java.util.concurrent.atomic.AtomicInteger; diff --git a/server/src/main/java/org/opensearch/common/Strings.java b/server/src/main/java/org/opensearch/common/Strings.java index e9f4d32ed2664..e3404d6ffb297 100644 --- a/server/src/main/java/org/opensearch/common/Strings.java +++ b/server/src/main/java/org/opensearch/common/Strings.java @@ -36,14 +36,12 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.OpenSearchException; import org.opensearch.core.common.bytes.BytesReference; -import org.opensearch.common.util.CollectionUtils; import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import java.io.IOException; import java.util.Arrays; -import java.util.Iterator; import java.util.Set; import static java.util.Collections.unmodifiableSet; @@ -131,30 +129,6 @@ public static boolean validFileNameExcludingAstrix(String fileName) { return true; } - /** - * Split a String at the first occurrence of the delimiter. - * Does not include the delimiter in the result. - * - * @param toSplit the string to split - * @param delimiter to split the string up with - * @return a two element array with index 0 being before the delimiter, and - * index 1 being after the delimiter (neither element includes the delimiter); - * or null if the delimiter wasn't found in the given input String - */ - public static String[] split(String toSplit, String delimiter) { - if (org.opensearch.core.common.Strings.hasLength(toSplit) == false - || org.opensearch.core.common.Strings.hasLength(delimiter) == false) { - return null; - } - int offset = toSplit.indexOf(delimiter); - if (offset < 0) { - return null; - } - String beforeDelimiter = toSplit.substring(0, offset); - String afterDelimiter = toSplit.substring(offset + delimiter.length()); - return new String[] { beforeDelimiter, afterDelimiter }; - } - private Strings() {} public static byte[] toUTF8Bytes(CharSequence charSequence) { @@ -166,38 +140,6 @@ public static byte[] toUTF8Bytes(CharSequence charSequence, BytesRefBuilder spar return Arrays.copyOf(spare.bytes(), spare.length()); } - /** - * Return substring(beginIndex, endIndex) that is impervious to string length. - */ - public static String substring(String s, int beginIndex, int endIndex) { - if (s == null) { - return s; - } - - int realEndIndex = s.length() > 0 ? s.length() - 1 : 0; - - if (endIndex > realEndIndex) { - return s.substring(beginIndex); - } else { - return s.substring(beginIndex, endIndex); - } - } - - /** - * If an array only consists of zero or one element, which is "*" or "_all" return an empty array - * which is usually used as everything - */ - public static boolean isAllOrWildcard(String[] data) { - return CollectionUtils.isEmpty(data) || data.length == 1 && isAllOrWildcard(data[0]); - } - - /** - * Returns `true` if the string is `_all` or `*`. - */ - public static boolean isAllOrWildcard(String data) { - return "_all".equals(data) || "*".equals(data); - } - /** * Return a {@link String} that is the json representation of the provided {@link ToXContent}. * Wraps the output into an anonymous object if needed. The content is not pretty-printed @@ -276,59 +218,4 @@ private static XContentBuilder createBuilder(MediaType mediaType, boolean pretty } return builder; } - - /** - * Truncates string to a length less than length. Backtracks to throw out - * high surrogates. 
- */ - public static String cleanTruncate(String s, int length) { - if (s == null) { - return s; - } - /* - * Its pretty silly for you to truncate to 0 length but just in case - * someone does this shouldn't break. - */ - if (length == 0) { - return ""; - } - if (length >= s.length()) { - return s; - } - if (Character.isHighSurrogate(s.charAt(length - 1))) { - length--; - } - return s.substring(0, length); - } - - public static String padStart(String s, int minimumLength, char c) { - if (s == null) { - throw new NullPointerException("s"); - } - if (s.length() >= minimumLength) { - return s; - } else { - StringBuilder sb = new StringBuilder(minimumLength); - for (int i = s.length(); i < minimumLength; i++) { - sb.append(c); - } - - sb.append(s); - return sb.toString(); - } - } - - public static String toLowercaseAscii(String in) { - StringBuilder out = new StringBuilder(); - Iterator iter = in.codePoints().iterator(); - while (iter.hasNext()) { - int codepoint = iter.next(); - if (codepoint > 128) { - out.appendCodePoint(codepoint); - } else { - out.appendCodePoint(Character.toLowerCase(codepoint)); - } - } - return out.toString(); - } } diff --git a/server/src/main/java/org/opensearch/common/logging/Loggers.java b/server/src/main/java/org/opensearch/common/logging/Loggers.java index 7b19bd4b4be25..9a88afef90867 100644 --- a/server/src/main/java/org/opensearch/common/logging/Loggers.java +++ b/server/src/main/java/org/opensearch/common/logging/Loggers.java @@ -46,7 +46,7 @@ import java.util.Map; -import static org.opensearch.common.util.CollectionUtils.asArrayList; +import static org.opensearch.core.common.util.CollectionUtils.asArrayList; /** * A set of utilities around Logging. diff --git a/server/src/main/java/org/opensearch/common/regex/Regex.java b/server/src/main/java/org/opensearch/common/regex/Regex.java index 14716fdff6d2e..396af77c8a751 100644 --- a/server/src/main/java/org/opensearch/common/regex/Regex.java +++ b/server/src/main/java/org/opensearch/common/regex/Regex.java @@ -122,8 +122,8 @@ public static boolean simpleMatch(String pattern, String str, boolean caseInsens return false; } if (caseInsensitive) { - pattern = org.opensearch.common.Strings.toLowercaseAscii(pattern); - str = org.opensearch.common.Strings.toLowercaseAscii(str); + pattern = Strings.toLowercaseAscii(pattern); + str = Strings.toLowercaseAscii(str); } return simpleMatchWithNormalizedStrings(pattern, str); } diff --git a/server/src/main/java/org/opensearch/identity/tokens/RestTokenExtractor.java b/server/src/main/java/org/opensearch/identity/tokens/RestTokenExtractor.java index ae200c7461a60..2d1d4d619f44d 100644 --- a/server/src/main/java/org/opensearch/identity/tokens/RestTokenExtractor.java +++ b/server/src/main/java/org/opensearch/identity/tokens/RestTokenExtractor.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.rest.RestRequest; import java.util.Collections; diff --git a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java index 86c5d4542e5fa..1899c9c77f26a 100644 --- a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java +++ b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java @@ -36,13 +36,13 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.util.StringBuilders; import org.opensearch.common.Booleans; -import 
org.opensearch.common.Strings; import org.opensearch.common.logging.OpenSearchLogMessage; import org.opensearch.common.logging.Loggers; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.core.common.Strings; import org.opensearch.core.index.Index; import org.opensearch.index.engine.Engine; import org.opensearch.index.mapper.ParsedDocument; diff --git a/server/src/main/java/org/opensearch/index/analysis/CustomAnalyzer.java b/server/src/main/java/org/opensearch/index/analysis/CustomAnalyzer.java index 8db5c89d75808..b0f4687c0b8b1 100644 --- a/server/src/main/java/org/opensearch/index/analysis/CustomAnalyzer.java +++ b/server/src/main/java/org/opensearch/index/analysis/CustomAnalyzer.java @@ -35,7 +35,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.Reader; diff --git a/server/src/main/java/org/opensearch/index/analysis/ReloadableCustomAnalyzer.java b/server/src/main/java/org/opensearch/index/analysis/ReloadableCustomAnalyzer.java index ba8996eb9c17b..c1bfca93b90f7 100644 --- a/server/src/main/java/org/opensearch/index/analysis/ReloadableCustomAnalyzer.java +++ b/server/src/main/java/org/opensearch/index/analysis/ReloadableCustomAnalyzer.java @@ -37,7 +37,7 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.util.CloseableThreadLocal; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.Reader; import java.util.Map; diff --git a/server/src/main/java/org/opensearch/index/engine/CompletionStatsCache.java b/server/src/main/java/org/opensearch/index/engine/CompletionStatsCache.java index c6cf7e8c1b53f..fb4e868a8903b 100644 --- a/server/src/main/java/org/opensearch/index/engine/CompletionStatsCache.java +++ b/server/src/main/java/org/opensearch/index/engine/CompletionStatsCache.java @@ -42,7 +42,7 @@ import org.opensearch.common.FieldMemoryStats; import org.opensearch.common.Nullable; import org.opensearch.common.regex.Regex; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.search.suggest.completion.CompletionStats; import java.util.HashMap; diff --git a/server/src/main/java/org/opensearch/index/fielddata/ShardFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/ShardFieldData.java index 138b417571784..16f0fb90ddc19 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/ShardFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/ShardFieldData.java @@ -36,7 +36,7 @@ import org.opensearch.common.FieldMemoryStats; import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.regex.Regex; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.core.index.shard.ShardId; diff --git a/server/src/main/java/org/opensearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/BinaryFieldMapper.java index a55e55111ffa2..f83ef8aaf6845 100644 --- 
a/server/src/main/java/org/opensearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/BinaryFieldMapper.java @@ -41,7 +41,7 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.plain.BytesBinaryIndexFieldData; diff --git a/server/src/main/java/org/opensearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IndexFieldMapper.java index 8cc307f7697c6..982b0eb909443 100644 --- a/server/src/main/java/org/opensearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/IndexFieldMapper.java @@ -34,7 +34,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.plain.ConstantIndexFieldData; import org.opensearch.index.query.QueryShardContext; diff --git a/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java index deda9883bc07b..685d0871d0230 100644 --- a/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/SourceFieldMapper.java @@ -41,7 +41,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.collect.Tuple; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.core.common.bytes.BytesReference; diff --git a/server/src/main/java/org/opensearch/index/query/GeoValidationMethod.java b/server/src/main/java/org/opensearch/index/query/GeoValidationMethod.java index aad3e50a0acd5..559c084325abb 100644 --- a/server/src/main/java/org/opensearch/index/query/GeoValidationMethod.java +++ b/server/src/main/java/org/opensearch/index/query/GeoValidationMethod.java @@ -35,7 +35,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.IOException; diff --git a/server/src/main/java/org/opensearch/index/query/Operator.java b/server/src/main/java/org/opensearch/index/query/Operator.java index 31caca0f60caa..ee8c93ce76ecb 100644 --- a/server/src/main/java/org/opensearch/index/query/Operator.java +++ b/server/src/main/java/org/opensearch/index/query/Operator.java @@ -36,7 +36,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.IOException; import java.util.Locale; diff --git 
a/server/src/main/java/org/opensearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/opensearch/index/query/SimpleQueryStringBuilder.java index cf9f86b6f6341..2a759415c2992 100644 --- a/server/src/main/java/org/opensearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SimpleQueryStringBuilder.java @@ -40,7 +40,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.lucene.search.Queries; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; diff --git a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java index 2f03eb66bd71e..3159d8bfc3c85 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java @@ -62,7 +62,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.unmodifiableList; import static org.opensearch.common.unit.TimeValue.timeValueNanos; -import static org.opensearch.common.util.CollectionUtils.isEmpty; +import static org.opensearch.core.common.util.CollectionUtils.isEmpty; /** * A scrollable source of hits from a {@linkplain Client} instance. diff --git a/server/src/main/java/org/opensearch/index/search/stats/ShardSearchStats.java b/server/src/main/java/org/opensearch/index/search/stats/ShardSearchStats.java index 6d0eb3a5949ca..6f6ebd5545c7a 100644 --- a/server/src/main/java/org/opensearch/index/search/stats/ShardSearchStats.java +++ b/server/src/main/java/org/opensearch/index/search/stats/ShardSearchStats.java @@ -36,7 +36,7 @@ import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.metrics.MeanMetric; import org.opensearch.common.regex.Regex; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.index.shard.SearchOperationListener; import org.opensearch.search.internal.ReaderContext; import org.opensearch.search.internal.SearchContext; diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 7be824d95b421..844f4fec706f3 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -193,7 +193,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static org.opensearch.common.collect.MapBuilder.newMapBuilder; -import static org.opensearch.common.util.CollectionUtils.arrayAsArrayList; +import static org.opensearch.core.common.util.CollectionUtils.arrayAsArrayList; import static org.opensearch.common.util.concurrent.OpenSearchExecutors.daemonThreadFactory; import static org.opensearch.index.IndexService.IndexCreationContext.CREATE_INDEX; import static org.opensearch.index.IndexService.IndexCreationContext.METADATA_VERIFICATION; diff --git a/server/src/main/java/org/opensearch/rest/RestRequest.java b/server/src/main/java/org/opensearch/rest/RestRequest.java index fb497a17cde72..b61a998276532 100644 --- 
a/server/src/main/java/org/opensearch/rest/RestRequest.java +++ b/server/src/main/java/org/opensearch/rest/RestRequest.java @@ -37,18 +37,17 @@ import org.opensearch.common.CheckedConsumer; import org.opensearch.common.Nullable; import org.opensearch.common.SetOnce; -import org.opensearch.common.Strings; -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; -import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.http.HttpChannel; import org.opensearch.http.HttpRequest; @@ -337,7 +336,7 @@ public final long getRequestId() { } /** - * The {@link XContentType} that was parsed from the {@code Content-Type} header. This value will be {@code null} in the case of + * The {@link MediaType} that was parsed from the {@code Content-Type} header. This value will be {@code null} in the case of * a request without a valid {@code Content-Type} header, a request without content ({@link #hasContent()}, or a plain text request */ @Nullable @@ -462,7 +461,7 @@ public String[] paramAsStringArray(String key, String[] defaultValue) { if (value == null) { return defaultValue; } - return org.opensearch.core.common.Strings.splitStringByCommaToArray(value); + return Strings.splitStringByCommaToArray(value); } public String[] paramAsStringArrayOrEmptyIfAll(String key) { diff --git a/server/src/main/java/org/opensearch/rest/action/RestActions.java b/server/src/main/java/org/opensearch/rest/action/RestActions.java index 7508871d3ab75..fc3d595afa70e 100644 --- a/server/src/main/java/org/opensearch/rest/action/RestActions.java +++ b/server/src/main/java/org/opensearch/rest/action/RestActions.java @@ -41,7 +41,7 @@ import org.opensearch.core.ParseField; import org.opensearch.core.common.ParsingException; import org.opensearch.common.lucene.uid.Versions; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContent.Params; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java index 7b84b3f655522..5ead69320fefd 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodeAttrsAction.java @@ -40,9 +40,9 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodes; -import org.opensearch.common.Strings; import org.opensearch.common.Table; import org.opensearch.common.logging.DeprecationLogger; +import org.opensearch.core.common.Strings; import org.opensearch.monitor.process.ProcessInfo; import org.opensearch.rest.RestRequest; import 
org.opensearch.rest.RestResponse; diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java index e29898624386c..5a0f85fea8e5d 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java @@ -44,10 +44,10 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodeRole; import org.opensearch.cluster.node.DiscoveryNodes; -import org.opensearch.common.Strings; import org.opensearch.common.Table; import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.network.NetworkAddress; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.http.HttpInfo; diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java index d30086fb8cd55..560b88787ae09 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestTasksAction.java @@ -38,10 +38,10 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodes; -import org.opensearch.common.Strings; import org.opensearch.common.Table; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.common.Strings; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestResponse; import org.opensearch.rest.action.RestResponseListener; diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index b244290e8ae74..0a7e355e29c4d 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -63,7 +63,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.concurrent.ConcurrentMapLong; import org.opensearch.common.util.io.IOUtils; diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetric.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetric.java index 6150940512406..fbcf4a4d48603 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetric.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalScriptedMetric.java @@ -34,7 +34,7 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.script.Script; import org.opensearch.script.ScriptedMetricAggContexts; diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregator.java 
b/server/src/main/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregator.java index f60f402939462..99a546ae4050a 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregator.java @@ -37,7 +37,7 @@ import org.apache.lucene.search.ScoreMode; import org.opensearch.common.Nullable; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.common.util.ObjectArray; import org.opensearch.common.lease.Releasables; import org.opensearch.script.Script; diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java index e53118669385b..67d311db964d6 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java @@ -44,7 +44,7 @@ import org.opensearch.common.Rounding; import org.opensearch.common.Rounding.Prepared; import org.opensearch.common.lucene.ScorerAware; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.index.fielddata.AbstractSortingNumericDocValues; import org.opensearch.index.fielddata.DocValueBits; import org.opensearch.index.fielddata.GeoShapeValue; diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/values/ScriptBytesValues.java b/server/src/main/java/org/opensearch/search/aggregations/support/values/ScriptBytesValues.java index e53d9b83a814c..349bd8e14edf6 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/values/ScriptBytesValues.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/values/ScriptBytesValues.java @@ -33,7 +33,7 @@ import org.apache.lucene.search.Scorable; import org.opensearch.common.lucene.ScorerAware; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.index.fielddata.SortedBinaryDocValues; import org.opensearch.index.fielddata.SortingBinaryDocValues; import org.opensearch.script.AggregationScript; diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/ScriptFieldsPhase.java b/server/src/main/java/org/opensearch/search/fetch/subphase/ScriptFieldsPhase.java index aad20f0746f58..67d1863050a7b 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/ScriptFieldsPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/ScriptFieldsPhase.java @@ -33,7 +33,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.opensearch.common.document.DocumentField; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.script.FieldScript; import org.opensearch.search.fetch.FetchContext; import org.opensearch.search.fetch.FetchSubPhase; diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index c087ad6b71344..69f86bb91cc6e 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ 
b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -48,7 +48,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.core.common.text.Text; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.TextSearchInfo; import org.opensearch.search.fetch.FetchSubPhase; diff --git a/server/src/main/java/org/opensearch/tasks/TaskId.java b/server/src/main/java/org/opensearch/tasks/TaskId.java index e7420fce397e2..50e1e0a8c00b5 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskId.java +++ b/server/src/main/java/org/opensearch/tasks/TaskId.java @@ -33,7 +33,7 @@ package org.opensearch.tasks; import org.opensearch.OpenSearchParseException; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/watcher/FileWatcher.java b/server/src/main/java/org/opensearch/watcher/FileWatcher.java index c27ade408e3af..2bf93804c6ecd 100644 --- a/server/src/main/java/org/opensearch/watcher/FileWatcher.java +++ b/server/src/main/java/org/opensearch/watcher/FileWatcher.java @@ -34,7 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.core.util.FileSystemUtils; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import java.io.IOException; import java.nio.file.Files; diff --git a/server/src/test/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java b/server/src/test/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java index ae632d22a3310..e7e077c55fdc5 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequestSerializationTests.java @@ -37,7 +37,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.Settings.Builder; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.test.AbstractWireSerializingTestCase; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/AddIncrementallyTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/AddIncrementallyTests.java index 16ff5f753d286..89cf5fe4f76c3 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/AddIncrementallyTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/AddIncrementallyTests.java @@ -46,7 +46,7 @@ import org.opensearch.cluster.routing.RoutingTable; import org.opensearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.hamcrest.Matcher; import org.hamcrest.Matchers; diff --git 
a/server/src/test/java/org/opensearch/common/StringsTests.java b/server/src/test/java/org/opensearch/common/StringsTests.java index 50f7be8be170d..ea756373181c5 100644 --- a/server/src/test/java/org/opensearch/common/StringsTests.java +++ b/server/src/test/java/org/opensearch/common/StringsTests.java @@ -32,7 +32,7 @@ package org.opensearch.common; -import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.test.OpenSearchTestCase; @@ -40,46 +40,9 @@ import java.util.Collections; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.is; public class StringsTests extends OpenSearchTestCase { - public void testIsAllOrWildCardString() { - assertThat(Strings.isAllOrWildcard("_all"), is(true)); - assertThat(Strings.isAllOrWildcard("*"), is(true)); - assertThat(Strings.isAllOrWildcard("foo"), is(false)); - assertThat(Strings.isAllOrWildcard(""), is(false)); - assertThat(Strings.isAllOrWildcard((String) null), is(false)); - } - - public void testSubstring() { - assertEquals(null, Strings.substring(null, 0, 1000)); - assertEquals("foo", Strings.substring("foo", 0, 1000)); - assertEquals("foo", Strings.substring("foo", 0, 3)); - assertEquals("oo", Strings.substring("foo", 1, 3)); - assertEquals("oo", Strings.substring("foo", 1, 100)); - assertEquals("f", Strings.substring("foo", 0, 1)); - } - - public void testCleanTruncate() { - assertEquals(null, Strings.cleanTruncate(null, 10)); - assertEquals("foo", Strings.cleanTruncate("foo", 10)); - assertEquals("foo", Strings.cleanTruncate("foo", 3)); - // Throws out high surrogates - assertEquals("foo", Strings.cleanTruncate("foo\uD83D\uDEAB", 4)); - // But will keep the whole character - assertEquals("foo\uD83D\uDEAB", Strings.cleanTruncate("foo\uD83D\uDEAB", 5)); - /* - * Doesn't take care around combining marks. This example has its - * meaning changed because that last codepoint is supposed to combine - * backwards into the find "o" and be represented as the "o" with a - * circle around it with a slash through it. As in "no 'o's allowed - * here. 
- */ - assertEquals("o", Strings.cleanTruncate("o\uD83D\uDEAB", 1)); - assertEquals("", Strings.cleanTruncate("foo", 0)); - } - public void testToStringToXContent() { final ToXContent toXContent; final boolean error; @@ -104,7 +67,7 @@ public void testToStringToXContent() { } } - String toString = Strings.toString(XContentType.JSON, toXContent); + String toString = Strings.toString(MediaTypeRegistry.JSON, toXContent); if (error) { assertThat(toString, containsString("\"error\":\"error building toString out of XContent:")); assertThat(toString, containsString("\"stack_trace\":")); @@ -117,10 +80,10 @@ public void testToStringToXContent() { public void testToStringToXContentWithOrWithoutParams() { ToXContent toXContent = (builder, params) -> builder.field("color_from_param", params.param("color", "red")); // Rely on the default value of "color" param when params are not passed - assertThat(Strings.toString(XContentType.JSON, toXContent), containsString("\"color_from_param\":\"red\"")); + assertThat(Strings.toString(MediaTypeRegistry.JSON, toXContent), containsString("\"color_from_param\":\"red\"")); // Pass "color" param explicitly assertThat( - Strings.toString(XContentType.JSON, toXContent, new ToXContent.MapParams(Collections.singletonMap("color", "blue"))), + Strings.toString(MediaTypeRegistry.JSON, toXContent, new ToXContent.MapParams(Collections.singletonMap("color", "blue"))), containsString("\"color_from_param\":\"blue\"") ); } diff --git a/server/src/test/java/org/opensearch/common/collect/EvictingQueueTests.java b/server/src/test/java/org/opensearch/common/collect/EvictingQueueTests.java index 9a9e0efcdb190..e976697bd59c8 100644 --- a/server/src/test/java/org/opensearch/common/collect/EvictingQueueTests.java +++ b/server/src/test/java/org/opensearch/common/collect/EvictingQueueTests.java @@ -29,7 +29,7 @@ package org.opensearch.common.collect; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.test.OpenSearchTestCase; import java.util.Collections; diff --git a/server/src/test/java/org/opensearch/common/util/CollectionUtilsTests.java b/server/src/test/java/org/opensearch/common/util/CollectionUtilsTests.java index f0a0d41aa7aab..1c629bd77fe93 100644 --- a/server/src/test/java/org/opensearch/common/util/CollectionUtilsTests.java +++ b/server/src/test/java/org/opensearch/common/util/CollectionUtilsTests.java @@ -32,6 +32,7 @@ package org.opensearch.common.util; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.test.OpenSearchTestCase; import java.util.ArrayList; @@ -45,7 +46,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; -import static org.opensearch.common.util.CollectionUtils.eagerPartition; +import static org.opensearch.core.common.util.CollectionUtils.eagerPartition; import static org.hamcrest.Matchers.containsString; public class CollectionUtilsTests extends OpenSearchTestCase { diff --git a/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java index 00ab96dab6a5c..eba41620aef84 100644 --- a/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/opensearch/common/xcontent/BaseXContentTestCase.java @@ -47,7 +47,7 @@ import org.opensearch.common.io.PathUtils; import org.opensearch.core.common.text.Text; import org.opensearch.common.unit.DistanceUnit; -import 
org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedObjectNotFoundException; diff --git a/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java index 2d950f0994976..49242d34fafb4 100644 --- a/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -50,7 +50,7 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.lucene.search.MoreLikeThisQuery; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; diff --git a/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java b/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java index 55f9f6947b999..7f1d1d3381751 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java +++ b/test/framework/src/main/java/org/opensearch/cluster/OpenSearchAllocationWithConstraintsTestCase.java @@ -14,7 +14,7 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.UnassignedInfo; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CollectionUtils; +import org.opensearch.core.common.util.CollectionUtils; import org.junit.Before; import java.util.Arrays; diff --git a/test/framework/src/main/java/org/opensearch/cluster/coordination/LinearizabilityChecker.java b/test/framework/src/main/java/org/opensearch/cluster/coordination/LinearizabilityChecker.java index 3d3cff6f3be9b..60aacb83e0dc1 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/coordination/LinearizabilityChecker.java +++ b/test/framework/src/main/java/org/opensearch/cluster/coordination/LinearizabilityChecker.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.FixedBitSet; -import org.opensearch.common.Strings; import org.opensearch.common.collect.Tuple; +import org.opensearch.core.common.Strings; import java.util.ArrayList; import java.util.Collection; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 422f6d8dfbe7d..2fe3b9ff38ff3 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -202,7 +202,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.common.unit.TimeValue.timeValueMillis; -import static org.opensearch.common.util.CollectionUtils.eagerPartition; +import static org.opensearch.core.common.util.CollectionUtils.eagerPartition; import static 
org.opensearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING; import static org.opensearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java index 7b787e12be64a..f0453ab11f2ac 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java @@ -174,7 +174,7 @@ import java.util.stream.Stream; import static java.util.Collections.emptyMap; -import static org.opensearch.common.util.CollectionUtils.arrayAsArrayList; +import static org.opensearch.core.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; From 1d28fac490406217e86fee5ffd50fb8fa45934fa Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Fri, 4 Aug 2023 19:04:03 -0500 Subject: [PATCH 59/75] [Refactor] Strings methods other than MediaType (#9126) This refactors all of the remaining utility methods (except those that convert MediaType and XContentBuilder to String) from the :server to :libs:opensearch-core library. This commit is to keep the Strings refactor surface area lean in preparation for cloud native and serverless refactoring. Signed-off-by: Nicholas Walter Knize --- .../org/opensearch/core/common/Strings.java | 359 +++++++++++------- .../create/CreateSnapshotRequest.java | 2 +- .../metadata/MetadataCreateIndexService.java | 7 +- .../MetadataIndexTemplateService.java | 7 +- .../java/org/opensearch/common/Strings.java | 88 ----- .../common/settings/PropertyPlaceholder.java | 2 +- .../BlobStoreIndexShardSnapshot.java | 2 +- .../repositories/RepositoriesService.java | 4 +- .../blobstore/BlobStoreRepository.java | 2 +- .../snapshots/SnapshotsService.java | 8 +- .../MetadataCreateIndexServiceTests.java | 6 +- .../RepositoriesServiceTests.java | 2 +- 12 files changed, 244 insertions(+), 245 deletions(-) diff --git a/libs/core/src/main/java/org/opensearch/core/common/Strings.java b/libs/core/src/main/java/org/opensearch/core/common/Strings.java index 2e610f6047fd4..f0ca12a307313 100644 --- a/libs/core/src/main/java/org/opensearch/core/common/Strings.java +++ b/libs/core/src/main/java/org/opensearch/core/common/Strings.java @@ -8,7 +8,9 @@ package org.opensearch.core.common; +import org.apache.lucene.util.BytesRefBuilder; import org.opensearch.common.Nullable; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.util.CollectionUtils; import java.io.BufferedReader; @@ -25,6 +27,9 @@ import java.util.TreeSet; import java.util.function.Supplier; +import static java.util.Collections.unmodifiableSet; +import static org.opensearch.common.util.set.Sets.newHashSet; + /** * String utility class. * @@ -35,103 +40,29 @@ public class Strings { public static final String UNKNOWN_UUID_VALUE = "_na_"; public static final String[] EMPTY_ARRAY = new String[0]; + public static final Set INVALID_FILENAME_CHARS = unmodifiableSet( + newHashSet('\\', '/', '*', '?', '"', '<', '>', '|', ' ', ',') + ); - /** - * Convenience method to return a Collection as a delimited (e.g. CSV) - * String. E.g. useful for toString() implementations. 
- * - * @param coll the Collection to display - * @param delim the delimiter to use (probably a ",") - * @param prefix the String to start each element with - * @param suffix the String to end each element with - * @return the delimited String - */ - public static String collectionToDelimitedString(Iterable coll, String delim, String prefix, String suffix) { - StringBuilder sb = new StringBuilder(); - collectionToDelimitedString(coll, delim, prefix, suffix, sb); - return sb.toString(); - } - - public static void collectionToDelimitedString(Iterable coll, String delim, String prefix, String suffix, StringBuilder sb) { - Iterator it = coll.iterator(); - while (it.hasNext()) { - sb.append(prefix).append(it.next()).append(suffix); - if (it.hasNext()) { - sb.append(delim); - } - } - } + // no instance: + private Strings() {} - /** - * Convenience method to return a Collection as a delimited (e.g. CSV) - * String. E.g. useful for toString() implementations. - * - * @param coll the Collection to display - * @param delim the delimiter to use (probably a ",") - * @return the delimited String - */ - public static String collectionToDelimitedString(Iterable coll, String delim) { - return collectionToDelimitedString(coll, delim, "", ""); - } + // --------------------------------------------------------------------- + // General convenience methods for working with Strings + // --------------------------------------------------------------------- - /** - * Convenience method to return a Collection as a CSV String. - * E.g. useful for toString() implementations. - * - * @param coll the Collection to display - * @return the delimited String - */ - public static String collectionToCommaDelimitedString(Iterable coll) { - return collectionToDelimitedString(coll, ","); - } - - /** - * Convenience method to return a String array as a delimited (e.g. CSV) - * String. E.g. useful for toString() implementations. - * - * @param arr the array to display - * @param delim the delimiter to use (probably a ",") - * @return the delimited String - */ - public static String arrayToDelimitedString(Object[] arr, String delim) { - StringBuilder sb = new StringBuilder(); - arrayToDelimitedString(arr, delim, sb); - return sb.toString(); - } - - public static void arrayToDelimitedString(Object[] arr, String delim, StringBuilder sb) { - if (isEmpty(arr)) { - return; - } - for (int i = 0; i < arr.length; i++) { - if (i > 0) { - sb.append(delim); + public static void spaceify(int spaces, String from, StringBuilder to) throws Exception { + try (BufferedReader reader = new BufferedReader(new StringReader(from))) { + String line; + while ((line = reader.readLine()) != null) { + for (int i = 0; i < spaces; i++) { + to.append(' '); + } + to.append(line).append('\n'); } - sb.append(arr[i]); } } - /** - * Convenience method to return a String array as a CSV String. - * E.g. useful for toString() implementations. - * - * @param arr the array to display - * @return the delimited String - */ - public static String arrayToCommaDelimitedString(Object[] arr) { - return arrayToDelimitedString(arr, ","); - } - - /** - * Determine whether the given array is empty: - * i.e. null or of zero length. - * - * @param array the array to check - */ - private static boolean isEmpty(Object[] array) { - return (array == null || array.length == 0); - } - /** * Check that the given CharSequence is neither null nor of length 0. * Note: Will return true for a CharSequence that purely consists of whitespace. 
@@ -150,6 +81,18 @@ public static boolean hasLength(CharSequence str) { return (str != null && str.length() > 0); } + /** + * Check that the given BytesReference is neither null nor of length 0 + * Note: Will return true for a BytesReference that purely consists of whitespace. + * + * @param bytesReference the BytesReference to check (may be null) + * @return true if the BytesReference is not null and has length + * @see Strings#hasLength(CharSequence) + */ + public static boolean hasLength(final BytesReference bytesReference) { + return (bytesReference != null && bytesReference.length() > 0); + } + /** * Check that the given String is neither null nor of length 0. * Note: Will return true for a String that purely consists of whitespace. @@ -158,7 +101,7 @@ public static boolean hasLength(CharSequence str) { * @return true if the String is not null and has length * @see Strings#hasLength(CharSequence) */ - public static boolean hasLength(String str) { + public static boolean hasLength(final String str) { return hasLength((CharSequence) str); } @@ -175,7 +118,7 @@ public static boolean hasLength(String str) { * @param str the CharSequence to check (may be null) * @return true if the CharSequence is either null or has a zero length */ - public static boolean isEmpty(CharSequence str) { + public static boolean isEmpty(final CharSequence str) { return hasLength(str) == false; } @@ -223,6 +166,42 @@ public static boolean hasText(String str) { return hasText((CharSequence) str); } + /** + * Trim all occurrences of the supplied leading character from the given String. + * + * @param str the String to check + * @param leadingCharacter the leading character to be trimmed + * @return the trimmed String + */ + public static String trimLeadingCharacter(String str, char leadingCharacter) { + if (hasLength(str) == false) { + return str; + } + StringBuilder sb = new StringBuilder(str); + while (sb.length() > 0 && sb.charAt(0) == leadingCharacter) { + sb.deleteCharAt(0); + } + return sb.toString(); + } + + /** + * Test whether the given string matches the given substring + * at the given index. + * + * @param str the original string (or StringBuilder) + * @param index the index in the original string to start matching against + * @param substring the substring to match at the given index + */ + public static boolean substringMatch(CharSequence str, int index, CharSequence substring) { + for (int j = 0; j < substring.length(); j++) { + int i = index + j; + if (i >= str.length() || str.charAt(i) != substring.charAt(j)) { + return false; + } + } + return true; + } + /** * Replace all occurrences of a substring within a string with * another string. @@ -252,24 +231,6 @@ public static String replace(String inString, String oldPattern, String newPatte return sb.toString(); } - /** - * Trim all occurrences of the supplied leading character from the given String. - * - * @param str the String to check - * @param leadingCharacter the leading character to be trimmed - * @return the trimmed String - */ - public static String trimLeadingCharacter(String str, char leadingCharacter) { - if (hasLength(str) == false) { - return str; - } - StringBuilder sb = new StringBuilder(str); - while (sb.length() > 0 && sb.charAt(0) == leadingCharacter) { - sb.deleteCharAt(0); - } - return sb.toString(); - } - /** * Delete all occurrences of the given substring. 
* @@ -303,16 +264,65 @@ public static String deleteAny(String inString, String charsToDelete) { return sb.toString(); } - public static void spaceify(int spaces, String from, StringBuilder to) throws Exception { - try (BufferedReader reader = new BufferedReader(new StringReader(from))) { - String line; - while ((line = reader.readLine()) != null) { - for (int i = 0; i < spaces; i++) { - to.append(' '); - } - to.append(line).append('\n'); + // --------------------------------------------------------------------- + // Convenience methods for working with formatted Strings + // --------------------------------------------------------------------- + + /** + * Quote the given String with single quotes. + * + * @param str the input String (e.g. "myString") + * @return the quoted String (e.g. "'myString'"), + * or null if the input was null + */ + public static String quote(String str) { + return (str != null ? "'" + str + "'" : null); + } + + /** + * Capitalize a String, changing the first letter to + * upper case as per {@link Character#toUpperCase(char)}. + * No other letters are changed. + * + * @param str the String to capitalize, may be null + * @return the capitalized String, null if null + */ + public static String capitalize(String str) { + return changeFirstCharacterCase(str, true); + } + + private static String changeFirstCharacterCase(String str, boolean capitalize) { + if (str == null || str.length() == 0) { + return str; + } + StringBuilder sb = new StringBuilder(str.length()); + if (capitalize) { + sb.append(Character.toUpperCase(str.charAt(0))); + } else { + sb.append(Character.toLowerCase(str.charAt(0))); + } + sb.append(str.substring(1)); + return sb.toString(); + } + + public static boolean validFileName(String fileName) { + for (int i = 0; i < fileName.length(); i++) { + char c = fileName.charAt(i); + if (INVALID_FILENAME_CHARS.contains(c)) { + return false; } } + return true; + } + + public static boolean validFileNameExcludingAstrix(String fileName) { + for (int i = 0; i < fileName.length(); i++) { + char c = fileName.charAt(i); + if (c != '*' && INVALID_FILENAME_CHARS.contains(c)) { + return false; + } + } + return true; } /** @@ -511,31 +521,91 @@ public static Set commaDelimitedListToSet(String str) { } /** - * Capitalize a String, changing the first letter to - * upper case as per {@link Character#toUpperCase(char)}. - * No other letters are changed. + * Convenience method to return a Collection as a delimited (e.g. CSV) + * String. E.g. useful for toString() implementations. 
* - * @param str the String to capitalize, may be null - * @return the capitalized String, null if null + * @param coll the Collection to display + * @param delim the delimiter to use (probably a ",") + * @param prefix the String to start each element with + * @param suffix the String to end each element with + * @return the delimited String */ - public static String capitalize(String str) { - return changeFirstCharacterCase(str, true); + public static String collectionToDelimitedString(Iterable coll, String delim, String prefix, String suffix) { + StringBuilder sb = new StringBuilder(); + collectionToDelimitedString(coll, delim, prefix, suffix, sb); + return sb.toString(); } - private static String changeFirstCharacterCase(String str, boolean capitalize) { - if (str == null || str.length() == 0) { - return str; - } - StringBuilder sb = new StringBuilder(str.length()); - if (capitalize) { - sb.append(Character.toUpperCase(str.charAt(0))); - } else { - sb.append(Character.toLowerCase(str.charAt(0))); + public static void collectionToDelimitedString(Iterable coll, String delim, String prefix, String suffix, StringBuilder sb) { + Iterator it = coll.iterator(); + while (it.hasNext()) { + sb.append(prefix).append(it.next()).append(suffix); + if (it.hasNext()) { + sb.append(delim); + } } - sb.append(str.substring(1)); + } + + /** + * Convenience method to return a Collection as a delimited (e.g. CSV) + * String. E.g. useful for toString() implementations. + * + * @param coll the Collection to display + * @param delim the delimiter to use (probably a ",") + * @return the delimited String + */ + public static String collectionToDelimitedString(Iterable coll, String delim) { + return collectionToDelimitedString(coll, delim, "", ""); + } + + /** + * Convenience method to return a Collection as a CSV String. + * E.g. useful for toString() implementations. + * + * @param coll the Collection to display + * @return the delimited String + */ + public static String collectionToCommaDelimitedString(Iterable coll) { + return collectionToDelimitedString(coll, ","); + } + + /** + * Convenience method to return a String array as a delimited (e.g. CSV) + * String. E.g. useful for toString() implementations. + * + * @param arr the array to display + * @param delim the delimiter to use (probably a ",") + * @return the delimited String + */ + public static String arrayToDelimitedString(Object[] arr, String delim) { + StringBuilder sb = new StringBuilder(); + arrayToDelimitedString(arr, delim, sb); return sb.toString(); } + public static void arrayToDelimitedString(Object[] arr, String delim, StringBuilder sb) { + if (isEmpty(arr)) { + return; + } + for (int i = 0; i < arr.length; i++) { + if (i > 0) { + sb.append(delim); + } + sb.append(arr[i]); + } + } + + /** + * Convenience method to return a String array as a CSV String. + * E.g. useful for toString() implementations. + * + * @param arr the array to display + * @return the delimited String + */ + public static String arrayToCommaDelimitedString(Object[] arr) { + return arrayToDelimitedString(arr, ","); + } + /** * Format the double value with a single decimal points, trimming trailing '.0'. */ @@ -559,6 +629,25 @@ public static String format1Decimals(double value, String suffix) { } } + /** + * Determine whether the given array is empty: + * i.e. null or of zero length. 
+ * + * @param array the array to check + */ + private static boolean isEmpty(final Object[] array) { + return (array == null || array.length == 0); + } + + public static byte[] toUTF8Bytes(CharSequence charSequence) { + return toUTF8Bytes(charSequence, new BytesRefBuilder()); + } + + public static byte[] toUTF8Bytes(CharSequence charSequence, BytesRefBuilder spare) { + spare.copyChars(charSequence); + return Arrays.copyOf(spare.bytes(), spare.length()); + } + /** * Return substring(beginIndex, endIndex) that is impervious to string length. */ diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 3bb552b80084d..ce41066868202 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -74,7 +74,7 @@ *

 * <li>must not contain hash sign ('#')</li>
 * <li>must not start with underscore ('_')</li>
 * <li>must be lowercase</li>
- * <li>must not contain invalid file name characters {@link org.opensearch.common.Strings#INVALID_FILENAME_CHARS}</li>
+ * <li>must not contain invalid file name characters {@link org.opensearch.core.common.Strings#INVALID_FILENAME_CHARS}</li>
  • * * * @opensearch.internal diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index db9964b1a2ff8..fd38763023532 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -257,11 +257,8 @@ public boolean validateDotIndex(String index, @Nullable Boolean isHidden) { * Validate the name for an index or alias against some static rules. */ public static void validateIndexOrAliasName(String index, BiFunction exceptionCtor) { - if (org.opensearch.common.Strings.validFileName(index) == false) { - throw exceptionCtor.apply( - index, - "must not contain the following characters " + org.opensearch.common.Strings.INVALID_FILENAME_CHARS - ); + if (Strings.validFileName(index) == false) { + throw exceptionCtor.apply(index, "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); } if (index.isEmpty()) { throw exceptionCtor.apply(index, "must not be empty"); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java index 4f12313f951bc..2c4e6bdec95b0 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java @@ -1502,12 +1502,9 @@ private void validate(String name, @Nullable Settings settings, List ind if (indexPattern.startsWith("_")) { validationErrors.add("index_pattern [" + indexPattern + "] must not start with '_'"); } - if (org.opensearch.common.Strings.validFileNameExcludingAstrix(indexPattern) == false) { + if (Strings.validFileNameExcludingAstrix(indexPattern) == false) { validationErrors.add( - "index_pattern [" - + indexPattern - + "] must not contain the following characters " - + org.opensearch.common.Strings.INVALID_FILENAME_CHARS + "index_pattern [" + indexPattern + "] must not contain the following characters " + Strings.INVALID_FILENAME_CHARS ); } } diff --git a/server/src/main/java/org/opensearch/common/Strings.java b/server/src/main/java/org/opensearch/common/Strings.java index e3404d6ffb297..cd56fff7b2c62 100644 --- a/server/src/main/java/org/opensearch/common/Strings.java +++ b/server/src/main/java/org/opensearch/common/Strings.java @@ -32,7 +32,6 @@ package org.opensearch.common; -import org.apache.lucene.util.BytesRefBuilder; import org.opensearch.ExceptionsHelper; import org.opensearch.OpenSearchException; import org.opensearch.core.common.bytes.BytesReference; @@ -41,11 +40,6 @@ import org.opensearch.core.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Arrays; -import java.util.Set; - -import static java.util.Collections.unmodifiableSet; -import static org.opensearch.common.util.set.Sets.newHashSet; /** * String utility class. 
@@ -56,90 +50,8 @@ public class Strings { public static final String[] EMPTY_ARRAY = org.opensearch.core.common.Strings.EMPTY_ARRAY; - // --------------------------------------------------------------------- - // General convenience methods for working with Strings - // --------------------------------------------------------------------- - - /** - * Check that the given BytesReference is neither null nor of length 0 - * Note: Will return true for a BytesReference that purely consists of whitespace. - * - * @param bytesReference the BytesReference to check (may be null) - * @return true if the BytesReference is not null and has length - * @see org.opensearch.core.common.Strings#hasLength(CharSequence) - */ - public static boolean hasLength(BytesReference bytesReference) { - return (bytesReference != null && bytesReference.length() > 0); - } - - /** - * Test whether the given string matches the given substring - * at the given index. - * - * @param str the original string (or StringBuilder) - * @param index the index in the original string to start matching against - * @param substring the substring to match at the given index - */ - public static boolean substringMatch(CharSequence str, int index, CharSequence substring) { - for (int j = 0; j < substring.length(); j++) { - int i = index + j; - if (i >= str.length() || str.charAt(i) != substring.charAt(j)) { - return false; - } - } - return true; - } - - // --------------------------------------------------------------------- - // Convenience methods for working with formatted Strings - // --------------------------------------------------------------------- - - /** - * Quote the given String with single quotes. - * - * @param str the input String (e.g. "myString") - * @return the quoted String (e.g. "'myString'"), - * or null if the input was null - */ - public static String quote(String str) { - return (str != null ? "'" + str + "'" : null); - } - - public static final Set INVALID_FILENAME_CHARS = unmodifiableSet( - newHashSet('\\', '/', '*', '?', '"', '<', '>', '|', ' ', ',') - ); - - public static boolean validFileName(String fileName) { - for (int i = 0; i < fileName.length(); i++) { - char c = fileName.charAt(i); - if (INVALID_FILENAME_CHARS.contains(c)) { - return false; - } - } - return true; - } - - public static boolean validFileNameExcludingAstrix(String fileName) { - for (int i = 0; i < fileName.length(); i++) { - char c = fileName.charAt(i); - if (c != '*' && INVALID_FILENAME_CHARS.contains(c)) { - return false; - } - } - return true; - } - private Strings() {} - public static byte[] toUTF8Bytes(CharSequence charSequence) { - return toUTF8Bytes(charSequence, new BytesRefBuilder()); - } - - public static byte[] toUTF8Bytes(CharSequence charSequence, BytesRefBuilder spare) { - spare.copyChars(charSequence); - return Arrays.copyOf(spare.bytes(), spare.length()); - } - /** * Return a {@link String} that is the json representation of the provided {@link ToXContent}. * Wraps the output into an anonymous object if needed. 
The content is not pretty-printed diff --git a/server/src/main/java/org/opensearch/common/settings/PropertyPlaceholder.java b/server/src/main/java/org/opensearch/common/settings/PropertyPlaceholder.java index 76b5642949e03..655ffc461cd91 100644 --- a/server/src/main/java/org/opensearch/common/settings/PropertyPlaceholder.java +++ b/server/src/main/java/org/opensearch/common/settings/PropertyPlaceholder.java @@ -32,7 +32,7 @@ package org.opensearch.common.settings; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import java.util.HashSet; import java.util.Objects; diff --git a/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java b/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java index 0b27afa5e9517..6a48007234260 100644 --- a/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java +++ b/server/src/main/java/org/opensearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java @@ -36,8 +36,8 @@ import org.apache.lucene.util.Version; import org.opensearch.OpenSearchParseException; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; import org.opensearch.common.lucene.Lucene; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentFragment; diff --git a/server/src/main/java/org/opensearch/repositories/RepositoriesService.java b/server/src/main/java/org/opensearch/repositories/RepositoriesService.java index e66f8ddee5678..b113a8e32c40e 100644 --- a/server/src/main/java/org/opensearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/opensearch/repositories/RepositoriesService.java @@ -57,7 +57,6 @@ import org.opensearch.cluster.service.ClusterManagerTaskKeys; import org.opensearch.cluster.service.ClusterManagerTaskThrottler; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.lifecycle.AbstractLifecycleComponent; import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.Setting; @@ -66,6 +65,7 @@ import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.common.Strings; import org.opensearch.repositories.blobstore.MeteredBlobStoreRepository; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -599,7 +599,7 @@ private Repository createRepository(RepositoryMetadata repositoryMetadata, Map new ParameterizedMessage( "deleting snapshots [{}] from repository [{}]", - org.opensearch.core.common.Strings.arrayToCommaDelimitedString(snapshotNames), + Strings.arrayToCommaDelimitedString(snapshotNames), repoName ) ); diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 63c3511a97d2b..b8a31f4d7dda9 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -671,7 +671,11 @@ public void testValidateIndexName() throws Exception { false, new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) ); - 
validateIndexName(checkerService, "index?name", "must not contain the following characters " + Strings.INVALID_FILENAME_CHARS); + validateIndexName( + checkerService, + "index?name", + "must not contain the following characters " + org.opensearch.core.common.Strings.INVALID_FILENAME_CHARS + ); validateIndexName(checkerService, "index#name", "must not contain '#'"); diff --git a/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java index 085a64b439bbe..076f11b679d16 100644 --- a/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/opensearch/repositories/RepositoriesServiceTests.java @@ -47,13 +47,13 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterApplierService; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.blobstore.BlobStore; import org.opensearch.common.lifecycle.Lifecycle; import org.opensearch.common.lifecycle.LifecycleListener; import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.index.mapper.MapperService; import org.opensearch.core.index.shard.ShardId; From c6e4bcd097969f00956c0f4c152540cb610e4f93 Mon Sep 17 00:00:00 2001 From: Poojita Raj Date: Fri, 4 Aug 2023 19:33:18 -0700 Subject: [PATCH 60/75] [Segment Replication] Prioritize replica shard movement during shard relocation (#8875) * add shard movement strategy setting Signed-off-by: Poojita Raj * add tests Signed-off-by: Poojita Raj * add changelog Signed-off-by: Poojita Raj * Add NodeVersionAllocationDecider check Signed-off-by: Poojita Raj * refactoring Signed-off-by: Poojita Raj * add annotation + refactor Signed-off-by: Poojita Raj --------- Signed-off-by: Poojita Raj --- CHANGELOG.md | 1 + .../org/opensearch/cluster/ClusterModule.java | 2 +- .../cluster/routing/RoutingNodes.java | 167 +++++++++++------- .../routing/ShardMovementStrategy.java | 57 ++++++ .../allocator/BalancedShardsAllocator.java | 27 ++- .../allocator/LocalShardsBalancer.java | 27 ++- .../decider/NodeVersionAllocationDecider.java | 32 ++++ .../common/settings/ClusterSettings.java | 1 + .../cluster/routing/RoutingNodesTests.java | 52 +++++- ...s.java => ShardMovementStrategyTests.java} | 65 +++++-- .../NodeVersionAllocationDeciderTests.java | 151 +++++++++++++++- 11 files changed, 491 insertions(+), 91 deletions(-) create mode 100644 server/src/main/java/org/opensearch/cluster/routing/ShardMovementStrategy.java rename server/src/test/java/org/opensearch/cluster/routing/{MovePrimaryFirstTests.java => ShardMovementStrategyTests.java} (60%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3b81cd3a60deb..0bb21fb425ccf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -82,6 +82,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - [distribution/archives] [Linux] [x64] Provide the variant of the distributions bundled with JRE ([#8195]()https://github.com/opensearch-project/OpenSearch/pull/8195) - Add configuration for file cache size to max remote data ratio to prevent oversubscription of file cache ([#8606](https://github.com/opensearch-project/OpenSearch/pull/8606)) - Disallow compression level to be set for default and best_compression index 
codecs ([#8737]()https://github.com/opensearch-project/OpenSearch/pull/8737) +- Prioritize replica shard movement during shard relocation ([#8875](https://github.com/opensearch-project/OpenSearch/pull/8875)) ### Dependencies - Bump `org.apache.logging.log4j:log4j-core` from 2.17.1 to 2.20.0 ([#8307](https://github.com/opensearch-project/OpenSearch/pull/8307)) diff --git a/server/src/main/java/org/opensearch/cluster/ClusterModule.java b/server/src/main/java/org/opensearch/cluster/ClusterModule.java index b80fd1d746831..e797a08aba3cd 100644 --- a/server/src/main/java/org/opensearch/cluster/ClusterModule.java +++ b/server/src/main/java/org/opensearch/cluster/ClusterModule.java @@ -359,7 +359,7 @@ public static Collection createAllocationDeciders( addAllocationDecider(deciders, new ConcurrentRebalanceAllocationDecider(settings, clusterSettings)); addAllocationDecider(deciders, new ConcurrentRecoveriesAllocationDecider(settings, clusterSettings)); addAllocationDecider(deciders, new EnableAllocationDecider(settings, clusterSettings)); - addAllocationDecider(deciders, new NodeVersionAllocationDecider()); + addAllocationDecider(deciders, new NodeVersionAllocationDecider(settings)); addAllocationDecider(deciders, new SnapshotInProgressAllocationDecider()); addAllocationDecider(deciders, new RestoreInProgressAllocationDecider()); addAllocationDecider(deciders, new FilterAllocationDecider(settings, clusterSettings)); diff --git a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java index 5402218664f6f..c5c480dc25d6e 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java +++ b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java @@ -1310,100 +1310,131 @@ private void ensureMutable() { } /** - * Creates an iterator over shards interleaving between nodes: The iterator returns the first shard from - * the first node, then the first shard of the second node, etc. until one shard from each node has been returned. - * The iterator then resumes on the first node by returning the second shard and continues until all shards from - * all the nodes have been returned. 
- * @param movePrimaryFirst if true, all primary shards are iterated over before iterating replica for any node - * @return iterator of shard routings + * Returns iterator of shard routings used by {@link #nodeInterleavedShardIterator(ShardMovementStrategy)} + * @param primaryFirst true when ShardMovementStrategy = ShardMovementStrategy.PRIMARY_FIRST, false when it is ShardMovementStrategy.REPLICA_FIRST */ - public Iterator nodeInterleavedShardIterator(boolean movePrimaryFirst) { + private Iterator buildIteratorForMovementStrategy(boolean primaryFirst) { final Queue> queue = new ArrayDeque<>(); for (Map.Entry entry : nodesToShards.entrySet()) { queue.add(entry.getValue().copyShards().iterator()); } - if (movePrimaryFirst) { - return new Iterator() { - private Queue replicaShards = new ArrayDeque<>(); - private Queue> replicaIterators = new ArrayDeque<>(); - - public boolean hasNext() { - while (!queue.isEmpty()) { - if (queue.peek().hasNext()) { - return true; - } - queue.poll(); - } - if (!replicaShards.isEmpty()) { + return new Iterator() { + private Queue shardRoutings = new ArrayDeque<>(); + private Queue> shardIterators = new ArrayDeque<>(); + + public boolean hasNext() { + while (queue.isEmpty() == false) { + if (queue.peek().hasNext()) { return true; } - while (!replicaIterators.isEmpty()) { - if (replicaIterators.peek().hasNext()) { - return true; - } - replicaIterators.poll(); + queue.poll(); + } + if (!shardRoutings.isEmpty()) { + return true; + } + while (!shardIterators.isEmpty()) { + if (shardIterators.peek().hasNext()) { + return true; } - return false; + shardIterators.poll(); } + return false; + } - public ShardRouting next() { - if (hasNext() == false) { - throw new NoSuchElementException(); - } - while (!queue.isEmpty()) { - Iterator iter = queue.poll(); + public ShardRouting next() { + if (hasNext() == false) { + throw new NoSuchElementException(); + } + while (!queue.isEmpty()) { + Iterator iter = queue.poll(); + if (primaryFirst) { if (iter.hasNext()) { ShardRouting result = iter.next(); if (result.primary()) { queue.offer(iter); return result; } - replicaShards.offer(result); - replicaIterators.offer(iter); + shardRoutings.offer(result); + shardIterators.offer(iter); + } + } else { + while (iter.hasNext()) { + ShardRouting result = iter.next(); + if (result.primary() == false) { + queue.offer(iter); + return result; + } + shardRoutings.offer(result); + shardIterators.offer(iter); } } - if (!replicaShards.isEmpty()) { - return replicaShards.poll(); - } - Iterator replicaIterator = replicaIterators.poll(); - ShardRouting replicaShard = replicaIterator.next(); - replicaIterators.offer(replicaIterator); - - assert !replicaShard.primary(); - return replicaShard; } - - public void remove() { - throw new UnsupportedOperationException(); + if (!shardRoutings.isEmpty()) { + return shardRoutings.poll(); } - }; + Iterator replicaIterator = shardIterators.poll(); + ShardRouting replicaShard = replicaIterator.next(); + shardIterators.offer(replicaIterator); + + assert replicaShard.primary() != primaryFirst; + return replicaShard; + } + + public void remove() { + throw new UnsupportedOperationException(); + } + + }; + } + + /** + * Creates an iterator over shards interleaving between nodes: The iterator returns the first shard from + * the first node, then the first shard of the second node, etc. until one shard from each node has been returned. 
+ * The iterator then resumes on the first node by returning the second shard and continues until all shards from + * all the nodes have been returned. + * @param shardMovementStrategy if ShardMovementStrategy.PRIMARY_FIRST, all primary shards are iterated over before iterating replica for any node + * if ShardMovementStrategy.REPLICA_FIRST, all replica shards are iterated over before iterating primary for any node + * if ShardMovementStrategy.NO_PREFERENCE, order of replica and primary shards doesn't matter in iteration + * @return iterator of shard routings + */ + public Iterator nodeInterleavedShardIterator(ShardMovementStrategy shardMovementStrategy) { + final Queue> queue = new ArrayDeque<>(); + for (Map.Entry entry : nodesToShards.entrySet()) { + queue.add(entry.getValue().copyShards().iterator()); + } + if (shardMovementStrategy == ShardMovementStrategy.PRIMARY_FIRST) { + return buildIteratorForMovementStrategy(true); } else { - return new Iterator() { - @Override - public boolean hasNext() { - while (!queue.isEmpty()) { - if (queue.peek().hasNext()) { - return true; + if (shardMovementStrategy == ShardMovementStrategy.REPLICA_FIRST) { + return buildIteratorForMovementStrategy(false); + } else { + return new Iterator() { + @Override + public boolean hasNext() { + while (!queue.isEmpty()) { + if (queue.peek().hasNext()) { + return true; + } + queue.poll(); } - queue.poll(); + return false; } - return false; - } - @Override - public ShardRouting next() { - if (hasNext() == false) { - throw new NoSuchElementException(); + @Override + public ShardRouting next() { + if (hasNext() == false) { + throw new NoSuchElementException(); + } + Iterator iter = queue.poll(); + queue.offer(iter); + return iter.next(); } - Iterator iter = queue.poll(); - queue.offer(iter); - return iter.next(); - } - public void remove() { - throw new UnsupportedOperationException(); - } - }; + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } } } diff --git a/server/src/main/java/org/opensearch/cluster/routing/ShardMovementStrategy.java b/server/src/main/java/org/opensearch/cluster/routing/ShardMovementStrategy.java new file mode 100644 index 0000000000000..cfdeed5c227b6 --- /dev/null +++ b/server/src/main/java/org/opensearch/cluster/routing/ShardMovementStrategy.java @@ -0,0 +1,57 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.cluster.routing; + +import org.opensearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; + +import java.util.Locale; + +/** + * ShardMovementStrategy defines the order in which shard movement occurs. + * + * ShardMovementStrategy values or rather their string representation to be used with + * {@link BalancedShardsAllocator#SHARD_MOVEMENT_STRATEGY_SETTING} via cluster settings. + * + * @opensearch.internal + */ +public enum ShardMovementStrategy { + /** + * default behavior in which order of shard movement doesn't matter. 
+ */ + NO_PREFERENCE, + + /** + * primary shards are moved first + */ + PRIMARY_FIRST, + + /** + * replica shards are moved first + */ + REPLICA_FIRST; + + public static ShardMovementStrategy parse(String strValue) { + if (strValue == null) { + return null; + } else { + strValue = strValue.toUpperCase(Locale.ROOT); + try { + return ShardMovementStrategy.valueOf(strValue); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Illegal allocation.shard_movement_strategy value [" + strValue + "]"); + } + } + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + +} diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 59d7fab59c266..19e0e318eb805 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -37,6 +37,7 @@ import org.apache.lucene.util.IntroSorter; import org.opensearch.cluster.routing.RoutingNode; import org.opensearch.cluster.routing.RoutingNodes; +import org.opensearch.cluster.routing.ShardMovementStrategy; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.UnassignedInfo; import org.opensearch.cluster.routing.UnassignedInfo.AllocationStatus; @@ -107,8 +108,22 @@ public class BalancedShardsAllocator implements ShardsAllocator { "cluster.routing.allocation.move.primary_first", false, Property.Dynamic, + Property.NodeScope, + Property.Deprecated + ); + + /** + * Decides order in which to move shards from node when shards can not stay on node anymore. {@link LocalShardsBalancer#moveShards()} + * Encapsulates behavior of above SHARD_MOVE_PRIMARY_FIRST_SETTING. 
+ */ + public static final Setting SHARD_MOVEMENT_STRATEGY_SETTING = new Setting( + "cluster.routing.allocation.shard_movement_strategy", + ShardMovementStrategy.NO_PREFERENCE.toString(), + ShardMovementStrategy::parse, + Property.Dynamic, Property.NodeScope ); + public static final Setting THRESHOLD_SETTING = Setting.floatSetting( "cluster.routing.allocation.balance.threshold", 1.0f, @@ -131,6 +146,7 @@ public class BalancedShardsAllocator implements ShardsAllocator { ); private volatile boolean movePrimaryFirst; + private volatile ShardMovementStrategy shardMovementStrategy; private volatile boolean preferPrimaryShardBalance; private volatile WeightFunction weightFunction; @@ -145,8 +161,10 @@ public BalancedShardsAllocator(Settings settings, ClusterSettings clusterSetting setWeightFunction(INDEX_BALANCE_FACTOR_SETTING.get(settings), SHARD_BALANCE_FACTOR_SETTING.get(settings)); setThreshold(THRESHOLD_SETTING.get(settings)); setPreferPrimaryShardBalance(PREFER_PRIMARY_SHARD_BALANCE.get(settings)); + setShardMovementStrategy(SHARD_MOVEMENT_STRATEGY_SETTING.get(settings)); clusterSettings.addSettingsUpdateConsumer(PREFER_PRIMARY_SHARD_BALANCE, this::setPreferPrimaryShardBalance); clusterSettings.addSettingsUpdateConsumer(SHARD_MOVE_PRIMARY_FIRST_SETTING, this::setMovePrimaryFirst); + clusterSettings.addSettingsUpdateConsumer(SHARD_MOVEMENT_STRATEGY_SETTING, this::setShardMovementStrategy); clusterSettings.addSettingsUpdateConsumer(INDEX_BALANCE_FACTOR_SETTING, SHARD_BALANCE_FACTOR_SETTING, this::setWeightFunction); clusterSettings.addSettingsUpdateConsumer(THRESHOLD_SETTING, this::setThreshold); } @@ -155,6 +173,10 @@ private void setMovePrimaryFirst(boolean movePrimaryFirst) { this.movePrimaryFirst = movePrimaryFirst; } + private void setShardMovementStrategy(ShardMovementStrategy shardMovementStrategy) { + this.shardMovementStrategy = shardMovementStrategy; + } + private void setWeightFunction(float indexBalance, float shardBalanceFactor) { weightFunction = new WeightFunction(indexBalance, shardBalanceFactor); } @@ -184,6 +206,7 @@ public void allocate(RoutingAllocation allocation) { logger, allocation, movePrimaryFirst, + shardMovementStrategy, weightFunction, threshold, preferPrimaryShardBalance @@ -205,6 +228,7 @@ public ShardAllocationDecision decideShardAllocation(final ShardRouting shard, f logger, allocation, movePrimaryFirst, + shardMovementStrategy, weightFunction, threshold, preferPrimaryShardBalance @@ -456,11 +480,12 @@ public Balancer( Logger logger, RoutingAllocation allocation, boolean movePrimaryFirst, + ShardMovementStrategy shardMovementStrategy, BalancedShardsAllocator.WeightFunction weight, float threshold, boolean preferPrimaryBalance ) { - super(logger, allocation, movePrimaryFirst, weight, threshold, preferPrimaryBalance); + super(logger, allocation, movePrimaryFirst, shardMovementStrategy, weight, threshold, preferPrimaryBalance); } } diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/LocalShardsBalancer.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/LocalShardsBalancer.java index 80b4f720bd104..e1e6b696e3ad2 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/LocalShardsBalancer.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/LocalShardsBalancer.java @@ -16,6 +16,7 @@ import org.opensearch.cluster.routing.RoutingNode; import org.opensearch.cluster.routing.RoutingNodes; import org.opensearch.cluster.routing.RoutingPool; +import 
org.opensearch.cluster.routing.ShardMovementStrategy; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.cluster.routing.UnassignedInfo; @@ -58,6 +59,7 @@ public class LocalShardsBalancer extends ShardsBalancer { private final RoutingAllocation allocation; private final RoutingNodes routingNodes; private final boolean movePrimaryFirst; + private final ShardMovementStrategy shardMovementStrategy; private final boolean preferPrimaryBalance; private final BalancedShardsAllocator.WeightFunction weight; @@ -74,6 +76,7 @@ public LocalShardsBalancer( Logger logger, RoutingAllocation allocation, boolean movePrimaryFirst, + ShardMovementStrategy shardMovementStrategy, BalancedShardsAllocator.WeightFunction weight, float threshold, boolean preferPrimaryBalance @@ -93,6 +96,7 @@ public LocalShardsBalancer( sorter = newNodeSorter(); inEligibleTargetNode = new HashSet<>(); this.preferPrimaryBalance = preferPrimaryBalance; + this.shardMovementStrategy = shardMovementStrategy; } /** @@ -527,6 +531,22 @@ private void checkAndAddInEligibleTargetNode(RoutingNode targetNode) { } } + /** + * Returns the correct Shard movement strategy to use. + * If users are still using deprecated setting "move_primary_first", we want behavior to remain unchanged. + * In the event of changing ShardMovementStrategy setting from default setting NO_PREFERENCE to either PRIMARY_FIRST or REPLICA_FIRST, we want that + * to have priority over values set in move_primary_first setting. + */ + private ShardMovementStrategy getShardMovementStrategy() { + if (shardMovementStrategy != ShardMovementStrategy.NO_PREFERENCE) { + return shardMovementStrategy; + } + if (movePrimaryFirst) { + return ShardMovementStrategy.PRIMARY_FIRST; + } + return ShardMovementStrategy.NO_PREFERENCE; + } + /** * Move started shards that can not be allocated to a node anymore * @@ -549,7 +569,8 @@ void moveShards() { checkAndAddInEligibleTargetNode(currentNode.getRoutingNode()); } boolean primariesThrottled = false; - for (Iterator it = allocation.routingNodes().nodeInterleavedShardIterator(movePrimaryFirst); it.hasNext();) { + for (Iterator it = allocation.routingNodes().nodeInterleavedShardIterator(getShardMovementStrategy()); it + .hasNext();) { // Verify if the cluster concurrent recoveries have been reached. if (allocation.deciders().canMoveAnyShard(allocation).type() != Decision.Type.YES) { logger.info( @@ -573,8 +594,8 @@ void moveShards() { continue; } - // Ensure that replicas don't relocate if primaries are being throttled and primary first is enabled - if (movePrimaryFirst && primariesThrottled && !shardRouting.primary()) { + // Ensure that replicas don't relocate if primaries are being throttled and primary first shard movement strategy is enabled + if ((shardMovementStrategy == ShardMovementStrategy.PRIMARY_FIRST) && primariesThrottled && !shardRouting.primary()) { logger.info( "Cannot move any replica shard in the cluster as movePrimaryFirst is enabled and primary shards" + "are being throttled. 
Skipping shard iteration" diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java index 19b7494c000de..9344b4c87830d 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/decider/NodeVersionAllocationDecider.java @@ -32,12 +32,18 @@ package org.opensearch.cluster.routing.allocation.decider; +import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.routing.RecoverySource; import org.opensearch.cluster.routing.RecoverySource.SnapshotRecoverySource; import org.opensearch.cluster.routing.RoutingNode; import org.opensearch.cluster.routing.RoutingNodes; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.allocation.RoutingAllocation; +import org.opensearch.common.settings.Settings; +import org.opensearch.indices.replication.common.ReplicationType; + +import java.util.List; +import java.util.stream.Collectors; /** * An allocation decider that prevents relocation or allocation from nodes @@ -52,9 +58,35 @@ public class NodeVersionAllocationDecider extends AllocationDecider { public static final String NAME = "node_version"; + private final ReplicationType replicationType; + + public NodeVersionAllocationDecider(Settings settings) { + replicationType = IndexMetadata.INDEX_REPLICATION_TYPE_SETTING.get(settings); + } + @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (shardRouting.primary()) { + if (replicationType == ReplicationType.SEGMENT) { + List replicas = allocation.routingNodes() + .assignedShards(shardRouting.shardId()) + .stream() + .filter(shr -> !shr.primary() && shr.active()) + .collect(Collectors.toList()); + for (ShardRouting replica : replicas) { + // can not allocate if target node version > any existing replica version + RoutingNode replicaNode = allocation.routingNodes().node(replica.currentNodeId()); + if (node.node().getVersion().after(replicaNode.node().getVersion())) { + return allocation.decision( + Decision.NO, + NAME, + "When segment replication is enabled, cannot relocate primary shard to a node with version [%s] if it has a replica on older version [%s]", + node.node().getVersion(), + replicaNode.node().getVersion() + ); + } + } + } if (shardRouting.currentNodeId() == null) { if (shardRouting.recoverySource() != null && shardRouting.recoverySource().getType() == RecoverySource.Type.SNAPSHOT) { // restoring from a snapshot - check that the node can handle the version diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 802eb7bd01254..064edbc76bd8b 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -240,6 +240,7 @@ public void apply(Settings value, Settings current, Settings previous) { BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, BalancedShardsAllocator.PREFER_PRIMARY_SHARD_BALANCE, BalancedShardsAllocator.SHARD_MOVE_PRIMARY_FIRST_SETTING, + BalancedShardsAllocator.SHARD_MOVEMENT_STRATEGY_SETTING, BalancedShardsAllocator.THRESHOLD_SETTING, BreakerSettings.CIRCUIT_BREAKER_LIMIT_SETTING, 
BreakerSettings.CIRCUIT_BREAKER_OVERHEAD_SETTING, diff --git a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java index 73136a71bc12a..23056ec5782bb 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java @@ -124,7 +124,7 @@ private IndexMetadata.Builder createIndexMetadata(String indexName) { .numberOfShards(this.numberOfShards); } - public void testInterleavedShardIterator() { + public void testInterleavedShardIteratorPrimaryFirst() { // Initialize all the shards for test index 1 and 2 initPrimaries(); startInitializingShards(TEST_INDEX_1); @@ -147,7 +147,8 @@ public void testInterleavedShardIterator() { } // Get primary first shard iterator and assert primary shards are iterated over first - final Iterator iterator = this.clusterState.getRoutingNodes().nodeInterleavedShardIterator(true); + final Iterator iterator = this.clusterState.getRoutingNodes() + .nodeInterleavedShardIterator(ShardMovementStrategy.PRIMARY_FIRST); boolean iteratingPrimary = true; int shardCount = 0; while (iterator.hasNext()) { @@ -155,11 +156,54 @@ public void testInterleavedShardIterator() { if (iteratingPrimary) { iteratingPrimary = shard.primary(); } else { - assert shard.primary() == false; + assertFalse(shard.primary()); } shardCount++; } - assert shardCount == this.totalNumberOfShards; + assertEquals(shardCount, this.totalNumberOfShards); + } + + public void testInterleavedShardIteratorNoPreference() { + // Initialize all the shards for test index 1 and 2 + initPrimaries(); + startInitializingShards(TEST_INDEX_1); + startInitializingShards(TEST_INDEX_1); + startInitializingShards(TEST_INDEX_2); + startInitializingShards(TEST_INDEX_2); + + final Iterator iterator = this.clusterState.getRoutingNodes() + .nodeInterleavedShardIterator(ShardMovementStrategy.NO_PREFERENCE); + int shardCount = 0; + while (iterator.hasNext()) { + final ShardRouting shard = iterator.next(); + shardCount++; + } + assertEquals(shardCount, this.totalNumberOfShards); + } + + public void testInterleavedShardIteratorReplicaFirst() { + // Initialize all the shards for test index 1 and 2 + initPrimaries(); + startInitializingShards(TEST_INDEX_1); + startInitializingShards(TEST_INDEX_1); + startInitializingShards(TEST_INDEX_2); + startInitializingShards(TEST_INDEX_2); + + // Get replica first shard iterator and assert replica shards are iterated over first + final Iterator iterator = this.clusterState.getRoutingNodes() + .nodeInterleavedShardIterator(ShardMovementStrategy.REPLICA_FIRST); + boolean iteratingReplica = true; + int shardCount = 0; + while (iterator.hasNext()) { + final ShardRouting shard = iterator.next(); + if (iteratingReplica) { + iteratingReplica = shard.primary() == false; + } else { + assertTrue(shard.primary()); + } + shardCount++; + } + assertEquals(shardCount, this.totalNumberOfShards); } public void testSwapPrimaryWithReplica() { diff --git a/server/src/test/java/org/opensearch/cluster/routing/MovePrimaryFirstTests.java b/server/src/test/java/org/opensearch/cluster/routing/ShardMovementStrategyTests.java similarity index 60% rename from server/src/test/java/org/opensearch/cluster/routing/MovePrimaryFirstTests.java rename to server/src/test/java/org/opensearch/cluster/routing/ShardMovementStrategyTests.java index a30581e2576e2..12994bdfcf6d5 100644 --- 
a/server/src/test/java/org/opensearch/cluster/routing/MovePrimaryFirstTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/ShardMovementStrategyTests.java @@ -22,7 +22,7 @@ @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) -public class MovePrimaryFirstTests extends OpenSearchIntegTestCase { +public class ShardMovementStrategyTests extends OpenSearchIntegTestCase { protected String startDataOnlyNode(final String zone) { final Settings settings = Settings.builder().put("node.attr.zone", zone).build(); @@ -48,14 +48,48 @@ protected void createAndIndex(String index, int replicaCount, int shardCount) { flushAndRefresh(index); } + private static Settings.Builder getSettings(ShardMovementStrategy shardMovementStrategy, boolean movePrimaryFirst) { + return Settings.builder() + .put("cluster.routing.allocation.shard_movement_strategy", shardMovementStrategy) + .put("cluster.routing.allocation.move.primary_first", movePrimaryFirst); + } + + public void testClusterGreenAfterPartialRelocationPrimaryFirstShardMovementMovePrimarySettingEnabled() throws InterruptedException { + testClusterGreenAfterPartialRelocation(ShardMovementStrategy.PRIMARY_FIRST, true); + } + + public void testClusterGreenAfterPartialRelocationPrimaryFirstShardMovementMovePrimarySettingDisabled() throws InterruptedException { + testClusterGreenAfterPartialRelocation(ShardMovementStrategy.PRIMARY_FIRST, false); + } + + public void testClusterGreenAfterPartialRelocationReplicaFirstShardMovementPrimaryFirstEnabled() throws InterruptedException { + testClusterGreenAfterPartialRelocation(ShardMovementStrategy.REPLICA_FIRST, true); + } + + public void testClusterGreenAfterPartialRelocationReplicaFirstShardMovementPrimaryFirstDisabled() throws InterruptedException { + testClusterGreenAfterPartialRelocation(ShardMovementStrategy.REPLICA_FIRST, false); + } + + public void testClusterGreenAfterPartialRelocationNoPreferenceShardMovementPrimaryFirstEnabled() throws InterruptedException { + testClusterGreenAfterPartialRelocation(ShardMovementStrategy.NO_PREFERENCE, true); + } + + private boolean shouldMovePrimaryShardsFirst(ShardMovementStrategy shardMovementStrategy, boolean movePrimaryFirst) { + if (shardMovementStrategy == ShardMovementStrategy.NO_PREFERENCE && movePrimaryFirst) { + return true; + } + return shardMovementStrategy == ShardMovementStrategy.PRIMARY_FIRST; + } + /** * Creates two nodes each in two zones and shuts down nodes in zone1 after * relocating half the number of shards. Shards per node constraint ensures * that exactly 50% of shards relocate to nodes in zone2 giving time to shut down - * nodes in zone1. Since primaries are relocated first as movePrimaryFirst is - * enabled, cluster should not become red and zone2 nodes have all the primaries + * nodes in zone1. 
Depending on the shard movement strategy, we check whether the + * primary or replica shards are moved first, and zone2 nodes have all the shards */ - public void testClusterGreenAfterPartialRelocation() throws InterruptedException { + private void testClusterGreenAfterPartialRelocation(ShardMovementStrategy shardMovementStrategy, boolean movePrimaryFirst) + throws InterruptedException { internalCluster().startClusterManagerOnlyNodes(1); final String z1 = "zone-1", z2 = "zone-2"; final int primaryShardCount = 6; @@ -73,7 +107,7 @@ public void testClusterGreenAfterPartialRelocation() throws InterruptedException // zone nodes excluded to prevent any shard relocation ClusterUpdateSettingsRequest settingsRequest = new ClusterUpdateSettingsRequest(); settingsRequest.persistentSettings( - Settings.builder().put("cluster.routing.allocation.move.primary_first", true).put("cluster.routing.allocation.exclude.zone", z2) + getSettings(shardMovementStrategy, movePrimaryFirst).put("cluster.routing.allocation.exclude.zone", z2) ); client().admin().cluster().updateSettings(settingsRequest).actionGet(); @@ -82,7 +116,7 @@ public void testClusterGreenAfterPartialRelocation() throws InterruptedException // Create cluster state listener to compute number of shards on new zone // nodes before counting down the latch - final CountDownLatch primaryMoveLatch = new CountDownLatch(1); + final CountDownLatch shardMoveLatch = new CountDownLatch(1); final ClusterStateListener listener = event -> { if (event.routingTableChanged()) { final RoutingNodes routingNodes = event.state().getRoutingNodes(); @@ -91,13 +125,22 @@ public void testClusterGreenAfterPartialRelocation() throws InterruptedException RoutingNode routingNode = it.next(); final String nodeName = routingNode.node().getName(); if (nodeName.equals(z2n1) || nodeName.equals(z2n2)) { - startedCount += routingNode.numberOfShardsWithState(ShardRoutingState.STARTED); + int count = 0; + for (ShardRouting shardEntry : routingNode) { + // If shard movement strategy is primary first, asserting that primary shards are moved first; else assert + // shards are replicas + if ((shardEntry.primary() == shouldMovePrimaryShardsFirst(shardMovementStrategy, movePrimaryFirst)) + && shardEntry.state() == ShardRoutingState.STARTED) { + count++; + } + } + startedCount += count; } } - // Count down the latch once all the primary shards have initialized on nodes in zone-2 + // Count down the latch once all the shards have initialized on nodes in zone-2 if (startedCount == primaryShardCount) { - primaryMoveLatch.countDown(); + shardMoveLatch.countDown(); } } }; @@ -108,12 +151,12 @@ public void testClusterGreenAfterPartialRelocation() throws InterruptedException settingsRequest.persistentSettings( Settings.builder() .put("cluster.routing.allocation.exclude.zone", z1) - // Total shards per node constraint is added to pause the relocation after primary shards + // Total shards per node constraint is added to pause the relocation after shards // have relocated to allow time for node shutdown and validate yellow cluster .put("cluster.routing.allocation.total_shards_per_node", primaryShardCount / 2) ); client().admin().cluster().updateSettings(settingsRequest); - primaryMoveLatch.await(); + shardMoveLatch.await(); // Shutdown both nodes in zone 1 and ensure cluster does not become red try { diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java 
b/server/src/test/java/org/opensearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 557d7db142671..d4ec340766ec9 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -67,6 +67,7 @@ import org.opensearch.common.util.set.Sets; import org.opensearch.core.index.Index; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.repositories.IndexId; import org.opensearch.snapshots.EmptySnapshotsInfoService; import org.opensearch.snapshots.InternalSnapshotsInfoService; @@ -439,7 +440,9 @@ public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNode .routingTable(routingTable) .nodes(DiscoveryNodes.builder().add(newNode).add(oldNode1).add(oldNode2)) .build(); - AllocationDeciders allocationDeciders = new AllocationDeciders(Collections.singleton(new NodeVersionAllocationDecider())); + AllocationDeciders allocationDeciders = new AllocationDeciders( + Collections.singleton(new NodeVersionAllocationDecider(Settings.EMPTY)) + ); AllocationService strategy = new MockAllocationService( allocationDeciders, new TestGatewayAllocator(), @@ -509,7 +512,7 @@ public void testRestoreDoesNotAllocateSnapshotOnOlderNodes() { .nodes(DiscoveryNodes.builder().add(newNode).add(oldNode1).add(oldNode2)) .build(); AllocationDeciders allocationDeciders = new AllocationDeciders( - Arrays.asList(new ReplicaAfterPrimaryActiveAllocationDecider(), new NodeVersionAllocationDecider()) + Arrays.asList(new ReplicaAfterPrimaryActiveAllocationDecider(), new NodeVersionAllocationDecider(Settings.EMPTY)) ); AllocationService strategy = new MockAllocationService( allocationDeciders, @@ -526,6 +529,148 @@ public void testRestoreDoesNotAllocateSnapshotOnOlderNodes() { } } + public void testRebalanceDoesNotAllocatePrimaryOnHigherVersionNodesSegrepEnabled() { + ShardId shard1 = new ShardId("test1", "_na_", 0); + ShardId shard2 = new ShardId("test2", "_na_", 0); + final DiscoveryNode newNode1 = new DiscoveryNode( + "newNode1", + buildNewFakeTransportAddress(), + emptyMap(), + CLUSTER_MANAGER_DATA_ROLES, + Version.CURRENT + ); + final DiscoveryNode newNode2 = new DiscoveryNode( + "newNode2", + buildNewFakeTransportAddress(), + emptyMap(), + CLUSTER_MANAGER_DATA_ROLES, + Version.CURRENT + ); + final DiscoveryNode oldNode1 = new DiscoveryNode( + "oldNode1", + buildNewFakeTransportAddress(), + emptyMap(), + CLUSTER_MANAGER_DATA_ROLES, + VersionUtils.getPreviousVersion() + ); + final DiscoveryNode oldNode2 = new DiscoveryNode( + "oldNode2", + buildNewFakeTransportAddress(), + emptyMap(), + CLUSTER_MANAGER_DATA_ROLES, + VersionUtils.getPreviousVersion() + ); + AllocationId allocationId1P = AllocationId.newInitializing(); + AllocationId allocationId1R = AllocationId.newInitializing(); + AllocationId allocationId2P = AllocationId.newInitializing(); + AllocationId allocationId2R = AllocationId.newInitializing(); + + Settings segmentReplicationSettings = Settings.builder() + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .build(); + Metadata metadata = Metadata.builder() + .put( + IndexMetadata.builder(shard1.getIndexName()) + .settings(settings(Version.CURRENT).put(segmentReplicationSettings)) + .numberOfShards(1) + .numberOfReplicas(1) + .putInSyncAllocationIds(0, Sets.newHashSet(allocationId1P.getId(), allocationId1R.getId())) + ) + 
.put( + IndexMetadata.builder(shard2.getIndexName()) + .settings(settings(Version.CURRENT).put(segmentReplicationSettings)) + .numberOfShards(1) + .numberOfReplicas(1) + .putInSyncAllocationIds(0, Sets.newHashSet(allocationId2P.getId(), allocationId2R.getId())) + ) + .build(); + RoutingTable routingTable = RoutingTable.builder() + .add( + IndexRoutingTable.builder(shard1.getIndex()) + .addIndexShard( + new IndexShardRoutingTable.Builder(shard1).addShard( + TestShardRouting.newShardRouting( + shard1.getIndexName(), + shard1.getId(), + oldNode1.getId(), + null, + true, + ShardRoutingState.STARTED, + allocationId1P + ) + ) + .addShard( + TestShardRouting.newShardRouting( + shard1.getIndexName(), + shard1.getId(), + oldNode2.getId(), + null, + false, + ShardRoutingState.STARTED, + allocationId1R + ) + ) + .build() + ) + ) + .add( + IndexRoutingTable.builder(shard2.getIndex()) + .addIndexShard( + new IndexShardRoutingTable.Builder(shard2).addShard( + TestShardRouting.newShardRouting( + shard2.getIndexName(), + shard2.getId(), + oldNode2.getId(), + null, + true, + ShardRoutingState.STARTED, + allocationId2P + ) + ) + .addShard( + TestShardRouting.newShardRouting( + shard2.getIndexName(), + shard2.getId(), + oldNode1.getId(), + null, + false, + ShardRoutingState.STARTED, + allocationId2R + ) + ) + .build() + ) + ) + .build(); + ClusterState state = ClusterState.builder(org.opensearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metadata(metadata) + .routingTable(routingTable) + .nodes(DiscoveryNodes.builder().add(newNode1).add(newNode2).add(oldNode1).add(oldNode2)) + .build(); + AllocationDeciders allocationDeciders = new AllocationDeciders( + Collections.singleton(new NodeVersionAllocationDecider(segmentReplicationSettings)) + ); + AllocationService strategy = new MockAllocationService( + allocationDeciders, + new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), + EmptyClusterInfoService.INSTANCE, + EmptySnapshotsInfoService.INSTANCE + ); + state = strategy.reroute(state, new AllocationCommands(), true, false).getClusterState(); + // the two indices must stay as is, the replicas cannot move to oldNode2 because versions don't match + assertThat(state.routingTable().index(shard2.getIndex()).shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(1)); + assertThat( + state.routingTable().index(shard2.getIndex()).shardsWithState(ShardRoutingState.RELOCATING).get(0).primary(), + equalTo(false) + ); + assertThat(state.routingTable().index(shard1.getIndex()).shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(1)); + assertThat( + state.routingTable().index(shard1.getIndex()).shardsWithState(ShardRoutingState.RELOCATING).get(0).primary(), + equalTo(false) + ); + } + private ClusterState stabilize(ClusterState clusterState, AllocationService service) { logger.trace("RoutingNodes: {}", clusterState.getRoutingNodes()); @@ -626,7 +771,7 @@ public void testMessages() { RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, null, 0); routingAllocation.debugDecision(true); - final NodeVersionAllocationDecider allocationDecider = new NodeVersionAllocationDecider(); + final NodeVersionAllocationDecider allocationDecider = new NodeVersionAllocationDecider(Settings.EMPTY); Decision decision = allocationDecider.canAllocate(primaryShard, newNode, routingAllocation); assertThat(decision.type(), is(Decision.Type.YES)); assertThat(decision.getExplanation(), is("the primary shard is new or 
already existed on the node")); From bb7893048153c1bbd205b2cc4c5321504b37efc3 Mon Sep 17 00:00:00 2001 From: Poojita Raj Date: Fri, 4 Aug 2023 20:10:24 -0700 Subject: [PATCH 61/75] Mute flaky test ReplicaToPrimaryPromotionIT.testFailoverWhileIndexing (#9131) Signed-off-by: Poojita Raj --- .../org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java index c73e7f603b09b..b68fd1f764a63 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/ReplicaToPrimaryPromotionIT.java @@ -122,6 +122,7 @@ public void testPromoteReplicaToPrimary() throws Exception { assertHitCount(client().prepareSearch(indexName).setSize(0).get(), numOfDocs); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/9130") public void testFailoverWhileIndexing() throws Exception { internalCluster().startNode(); internalCluster().startNode(); From 4c5981091fcff67830bc6a9f2f9dee9469dffae7 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Sat, 5 Aug 2023 00:34:36 -0500 Subject: [PATCH 62/75] [Refactor] Remaining Strings utility methods to core library (#9103) This commit refactors the remaining Strings# utility methods from the duplicate :server:Strings class to the :libs:opensearch-core:Strings class. The CollectionUtils class (used by the Strings utility) is also refactored to the :libs:opensearch-common library and the unnecessary Strings.toString(XContentBuilder... static method is finally refactored as an instance method in XContentBuilder. The Strings class is also cleaned up for clarity. 
Signed-off-by: Nicholas Walter Knize --- .../client/indices/CreateIndexRequest.java | 3 +- .../client/slm/SnapshotLifecyclePolicy.java | 2 +- .../slm/SnapshotLifecyclePolicyMetadata.java | 2 +- .../client/slm/SnapshotLifecycleStats.java | 2 +- .../slm/SnapshotRetentionConfiguration.java | 2 +- .../opensearch/client/BulkProcessorIT.java | 3 +- .../java/org/opensearch/client/SearchIT.java | 5 +- .../documentation/CRUDDocumentationIT.java | 19 +- .../opensearch/test/rest/NodeRestUsageIT.java | 2 +- .../org/opensearch/core/common/Strings.java | 77 ++ .../core/xcontent/XContentBuilder.java | 10 + ...ultShardOperationFailedExceptionTests.java | 2 +- .../opensearch/core/common/StringsTests.java | 51 + .../common/xcontent/ObjectParserTests.java | 3 +- .../common/xcontent/SimpleStruct.java | 2 +- .../common/xcontent/XContentParserTests.java | 17 +- ...ractGeoAggregatorModulePluginTestCase.java | 3 +- .../mustache/MultiSearchTemplateIT.java | 18 +- .../mustache/CustomMustacheFactory.java | 3 +- .../mustache/MultiSearchTemplateResponse.java | 2 +- .../mustache/SearchTemplateRequest.java | 7 +- .../mapper/RankFeatureFieldMapperTests.java | 3 +- .../RankFeatureMetaFieldMapperTests.java | 32 +- .../mapper/RankFeaturesFieldMapperTests.java | 3 +- .../mapper/ScaledFloatFieldMapperTests.java | 3 +- .../SearchAsYouTypeFieldMapperTests.java | 2 +- .../query/RankFeatureQueryBuilderTests.java | 19 +- .../join/query/ParentChildTestCase.java | 3 +- .../mapper/ParentJoinFieldMapperTests.java | 474 +++---- .../join/query/HasChildQueryBuilderTests.java | 3 +- .../query/HasParentQueryBuilderTests.java | 3 +- .../join/query/ParentIdQueryBuilderTests.java | 3 +- .../percolator/CandidateQueryTests.java | 83 +- .../PercolateQueryBuilderTests.java | 7 +- .../PercolateWithNestedQueryBuilderTests.java | 3 +- .../PercolatorFieldMapperTests.java | 221 ++- .../index/rankeval/RankEvalResponse.java | 2 +- .../index/rankeval/RankEvalSpec.java | 2 +- .../index/rankeval/RatedDocument.java | 2 +- .../index/rankeval/RatedRequest.java | 2 +- .../DiscountedCumulativeGainTests.java | 2 +- .../index/rankeval/RankEvalSpecTests.java | 3 +- .../reindex/remote/RemoteRequestBuilders.java | 7 +- .../RepositoryURLClientYamlTestSuiteIT.java | 3 +- .../rest/Netty4HeadBodyIsEmptyIT.java | 9 +- .../ICUCollationKeywordFieldMapperTests.java | 3 +- .../org/opensearch/cloud/gce/GCEFixture.java | 61 +- .../CorrelationVectorsEngineIT.java | 12 +- .../query/CorrelationQueryBuilderTests.java | 2 +- .../AnnotatedTextFieldMapperTests.java | 192 ++- .../index/mapper/size/SizeMappingTests.java | 20 +- ...rossClusterSearchUnavailableClusterIT.java | 3 +- .../upgrades/FullClusterRestartIT.java | 44 +- .../upgrades/QueryBuilderBWCIT.java | 3 +- .../org/opensearch/backwards/IndexingIT.java | 6 +- .../java/org/opensearch/search/CCSDuelIT.java | 4 +- .../org/opensearch/upgrades/RecoveryIT.java | 2 +- .../http/SearchRestCancellationIT.java | 3 +- .../opensearch/upgrades/TranslogPolicyIT.java | 2 +- .../ClusterAllocationExplainIT.java | 3 +- .../action/bulk/BulkWithUpdatesIT.java | 2 +- .../cluster/routing/PrimaryAllocationIT.java | 2 +- .../document/DocumentActionsIT.java | 15 +- .../gateway/RecoveryFromGatewayIT.java | 45 +- .../java/org/opensearch/get/GetActionIT.java | 25 +- .../mapper/CopyToMapperIntegrationIT.java | 20 +- .../opensearch/index/shard/IndexShardIT.java | 2 +- .../mapping/SimpleGetFieldMappingsIT.java | 9 +- .../RandomExceptionCircuitBreakerIT.java | 31 +- .../indices/recovery/IndexRecoveryIT.java | 2 +- .../indices/state/OpenCloseIndexIT.java | 20 
+- .../search/SearchCancellationIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 20 +- .../SignificantTermsSignificanceScoreIT.java | 3 +- .../basic/SearchWithRandomExceptionsIT.java | 20 +- .../basic/SearchWithRandomIOExceptionsIT.java | 20 +- .../highlight/HighlighterSearchIT.java | 77 +- .../search/fields/SearchFieldsIT.java | 326 +++-- .../opensearch/search/geo/GeoFilterIT.java | 22 +- .../search/geo/GeoShapeIntegrationIT.java | 118 +- .../geo/LegacyGeoShapeIntegrationIT.java | 122 +- .../search/morelikethis/MoreLikeThisIT.java | 9 +- .../org/opensearch/search/query/ExistsIT.java | 10 +- .../search/query/SimpleQueryStringIT.java | 43 +- .../search/slice/SearchSliceIT.java | 38 +- .../opensearch/search/sort/SimpleSortIT.java | 59 +- .../search/suggest/SuggestSearchIT.java | 61 +- .../cluster/health/ClusterHealthResponse.java | 2 +- .../cluster/node/info/NodesInfoResponse.java | 3 +- .../NodesReloadSecureSettingsResponse.java | 3 +- .../node/stats/NodesStatsResponse.java | 3 +- .../tasks/cancel/CancelTasksResponse.java | 2 +- .../node/tasks/get/GetTaskResponse.java | 2 +- .../node/tasks/list/ListTasksResponse.java | 2 +- .../node/usage/NodesUsageResponse.java | 3 +- .../restore/RestoreRemoteStoreRequest.java | 2 +- .../stats/RemoteStoreStatsResponse.java | 2 +- .../verify/VerifyRepositoryResponse.java | 2 +- .../settings/ClusterGetSettingsResponse.java | 2 +- .../snapshots/clone/CloneSnapshotRequest.java | 2 +- .../create/CreateSnapshotRequest.java | 8 +- .../snapshots/get/GetSnapshotsResponse.java | 2 +- .../restore/RestoreSnapshotRequest.java | 2 +- .../snapshots/status/SnapshotStats.java | 2 +- .../snapshots/status/SnapshotStatus.java | 2 +- .../admin/cluster/stats/AnalysisStats.java | 2 +- .../cluster/stats/ClusterStatsResponse.java | 3 +- .../admin/cluster/stats/MappingStats.java | 2 +- .../action/admin/indices/alias/Alias.java | 16 +- .../indices/alias/IndicesAliasesRequest.java | 4 +- .../admin/indices/analyze/AnalyzeAction.java | 2 +- .../indices/close/CloseIndexResponse.java | 2 +- .../indices/create/CreateIndexRequest.java | 5 +- .../admin/indices/get/GetIndexResponse.java | 2 +- .../mapping/get/GetMappingsResponse.java | 2 +- .../readonly/AddIndexBlockResponse.java | 2 +- .../indices/recovery/RecoveryResponse.java | 2 +- .../SegmentReplicationStatsResponse.java | 2 +- .../admin/indices/rollover/RolloverInfo.java | 2 +- .../settings/get/GetSettingsResponse.java | 3 +- .../settings/put/UpdateSettingsRequest.java | 2 +- .../indices/stats/IndicesStatsResponse.java | 2 +- .../template/put/PutIndexTemplateRequest.java | 3 +- .../action/bulk/BulkItemRequest.java | 2 +- .../action/bulk/BulkItemResponse.java | 2 +- .../action/fieldcaps/FieldCapabilities.java | 2 +- .../fieldcaps/FieldCapabilitiesResponse.java | 2 +- .../opensearch/action/get/GetResponse.java | 2 +- .../action/get/MultiGetRequest.java | 2 +- .../action/index/IndexResponse.java | 2 +- .../action/ingest/GetPipelineResponse.java | 2 +- .../search/GetSearchPipelineResponse.java | 2 +- .../action/search/MultiSearchResponse.java | 2 +- .../action/search/SearchResponse.java | 2 +- .../support/replication/ReplicationTask.java | 2 +- .../cluster/RepositoryCleanupInProgress.java | 2 +- .../cluster/SnapshotsInProgress.java | 2 +- .../ClusterAwarenessAttributeValueHealth.java | 2 +- .../ClusterAwarenessAttributesHealth.java | 2 +- .../cluster/coordination/Coordinator.java | 2 +- .../DecommissionAttributeMetadata.java | 2 +- .../cluster/health/ClusterShardHealth.java | 2 +- .../cluster/metadata/AliasMetadata.java | 2 +- 
.../cluster/metadata/ComponentTemplate.java | 2 +- .../metadata/ComponentTemplateMetadata.java | 2 +- .../metadata/ComposableIndexTemplate.java | 2 +- .../ComposableIndexTemplateMetadata.java | 2 +- .../cluster/metadata/DataStreamMetadata.java | 2 +- .../metadata/IndexTemplateMetadata.java | 7 +- .../MetadataIndexTemplateService.java | 16 +- .../metadata/RepositoriesMetadata.java | 2 +- .../opensearch/cluster/metadata/Template.java | 22 +- .../metadata/WeightedRoutingMetadata.java | 2 +- .../allocation/DiskThresholdSettings.java | 4 +- .../command/AllocationCommands.java | 2 +- .../java/org/opensearch/common/Strings.java | 133 -- .../common/geo/builders/ShapeBuilder.java | 2 +- .../opensearch/common/settings/Setting.java | 6 +- .../opensearch/common/settings/Settings.java | 12 +- .../common/settings/SettingsModule.java | 3 +- .../common/xcontent/XContentHelper.java | 3 +- .../org/opensearch/index/get/GetResult.java | 2 +- .../index/mapper/MapperService.java | 10 +- .../org/opensearch/index/mapper/Mapping.java | 3 +- .../index/mapper/RootObjectMapper.java | 2 +- .../index/query/AbstractQueryBuilder.java | 2 +- .../index/query/InnerHitBuilder.java | 2 +- .../index/query/MoreLikeThisQueryBuilder.java | 4 +- .../index/query/SpanNearQueryBuilder.java | 4 +- .../index/reindex/BulkByScrollTask.java | 2 +- .../index/reindex/ScrollableHitSource.java | 4 +- .../index/search/stats/SearchStats.java | 2 +- .../index/shard/PrimaryReplicaSyncer.java | 2 +- .../RemoveCorruptedShardDataCommand.java | 2 +- .../index/translog/TranslogStats.java | 2 +- .../common/ReplicationLuceneIndex.java | 3 +- .../ingest/PipelineConfiguration.java | 2 +- .../PersistentTasksCustomMetadata.java | 2 +- .../PersistentTasksNodeService.java | 2 +- .../repositories/RepositoryCleanupResult.java | 2 +- .../repositories/RepositoryInfo.java | 2 +- .../repositories/RepositoryStatsSnapshot.java | 2 +- .../java/org/opensearch/script/Script.java | 3 +- .../opensearch/script/ScriptException.java | 3 +- .../opensearch/script/StoredScriptSource.java | 3 +- .../java/org/opensearch/search/SearchHit.java | 2 +- .../aggregations/AggregationBuilder.java | 2 +- .../aggregations/AggregatorFactories.java | 2 +- .../search/aggregations/BucketOrder.java | 2 +- .../aggregations/InternalAggregation.java | 2 +- .../PipelineAggregationBuilder.java | 2 +- .../BaseMultiValuesSourceFieldConfig.java | 2 +- .../highlight/AbstractHighlighterBuilder.java | 2 +- .../pipeline/PipelineConfiguration.java | 2 +- .../search/rescore/RescorerBuilder.java | 2 +- .../searchafter/SearchAfterBuilder.java | 3 +- .../opensearch/search/slice/SliceBuilder.java | 4 +- .../opensearch/search/sort/SortBuilder.java | 2 +- .../opensearch/search/suggest/Suggest.java | 2 +- .../search/suggest/SuggestBuilder.java | 2 +- .../completion/context/ContextMapping.java | 3 +- .../DirectCandidateGeneratorBuilder.java | 3 +- .../org/opensearch/snapshots/RestoreInfo.java | 2 +- .../org/opensearch/tasks/RawTaskStatus.java | 2 +- .../java/org/opensearch/tasks/TaskId.java | 2 +- .../java/org/opensearch/tasks/TaskInfo.java | 2 +- .../opensearch/tasks/TaskResourceStats.java | 2 +- .../opensearch/tasks/TaskResourceUsage.java | 2 +- .../java/org/opensearch/tasks/TaskResult.java | 2 +- .../org/opensearch/tasks/TaskThreadUsage.java | 2 +- .../transport/RemoteClusterService.java | 2 +- .../ExceptionSerializationTests.java | 2 +- .../opensearch/OpenSearchExceptionTests.java | 35 +- .../ClusterAllocationExplainActionTests.java | 3 +- .../ClusterAllocationExplanationTests.java | 3 +- 
.../node/tasks/TransportTasksActionTests.java | 10 +- .../reroute/ClusterRerouteResponseTests.java | 7 +- .../indices/alias/AliasActionsTests.java | 3 +- .../close/CloseIndexResponseTests.java | 2 +- .../create/CreateIndexRequestTests.java | 10 +- .../create/CreateIndexResponseTests.java | 2 +- .../get/GetFieldMappingsResponseTests.java | 2 +- .../mapping/put/PutMappingRequestTests.java | 2 +- .../MetadataRolloverServiceTests.java | 3 +- .../indices/shrink/ResizeRequestTests.java | 2 +- .../indices/shrink/ResizeResponseTests.java | 2 +- .../indices/stats/IndicesStatsTests.java | 2 +- .../query/ValidateQueryResponseTests.java | 2 +- .../action/delete/DeleteResponseTests.java | 2 +- .../action/get/GetResponseTests.java | 2 +- .../action/index/IndexResponseTests.java | 2 +- .../action/main/MainResponseTests.java | 3 +- .../search/ClearScrollRequestTests.java | 3 +- .../search/MultiSearchRequestTests.java | 2 +- .../SearchPhaseExecutionExceptionTests.java | 2 +- .../action/search/SearchResponseTests.java | 13 +- .../search/SearchScrollRequestTests.java | 3 +- .../replication/ReplicationResponseTests.java | 2 +- .../action/update/UpdateResponseTests.java | 2 +- .../opensearch/cluster/ClusterStateTests.java | 9 +- .../cluster/metadata/IndexGraveyardTests.java | 2 +- .../cluster/metadata/IndexMetadataTests.java | 2 +- .../MetadataCreateIndexServiceTests.java | 24 +- .../MetadataIndexTemplateServiceTests.java | 22 +- .../cluster/metadata/MetadataTests.java | 4 +- .../metadata/ToAndFromJsonMetadataTests.java | 11 +- .../ClusterStateToStringTests.java | 2 +- .../org/opensearch/common/StringsTests.java | 90 -- .../common/geo/GeoJsonParserTests.java | 322 +++-- .../common/geo/GeoJsonShapeParserTests.java | 1228 ++++++++--------- .../common/geo/GeometryParserTests.java | 9 +- .../common/settings/SettingsFilterTests.java | 3 +- .../common/settings/SettingsTests.java | 11 +- .../common/xcontent/BaseXContentTestCase.java | 10 +- .../common/xcontent/XContentFactoryTests.java | 5 +- .../builder/XContentBuilderTests.java | 40 +- .../support/XContentMapValuesTests.java | 24 +- .../AbstractXContentFilteringTestCase.java | 3 +- .../opensearch/index/IndexServiceTests.java | 3 +- .../fielddata/BinaryDVFieldDataTests.java | 26 +- .../index/get/DocumentFieldTests.java | 2 +- .../opensearch/index/get/GetResultTests.java | 2 +- .../index/mapper/BooleanFieldMapperTests.java | 5 +- .../mapper/CompletionFieldMapperTests.java | 2 +- .../mapper/DataStreamFieldMapperTests.java | 70 +- .../index/mapper/DocumentParserTests.java | 24 +- .../index/mapper/DynamicMappingTests.java | 2 +- .../index/mapper/DynamicTemplateTests.java | 9 +- .../index/mapper/FieldAliasMapperTests.java | 32 +- .../mapper/FieldNamesFieldMapperTests.java | 102 +- .../mapper/FlatObjectFieldDataTests.java | 24 +- .../mapper/FlatObjectFieldMapperTests.java | 17 +- .../mapper/GeoShapeFieldMapperTests.java | 2 +- .../index/mapper/IdFieldMapperTests.java | 3 +- .../index/mapper/IndexFieldMapperTests.java | 14 +- .../index/mapper/IpRangeFieldMapperTests.java | 53 +- .../index/mapper/KeywordFieldMapperTests.java | 3 +- .../LegacyGeoShapeFieldMapperTests.java | 4 +- .../index/mapper/MapperServiceTests.java | 32 +- .../index/mapper/MultiFieldTests.java | 67 +- .../index/mapper/NestedObjectMapperTests.java | 554 ++++---- .../mapper/NullValueObjectMappingTests.java | 24 +- .../index/mapper/NullValueTests.java | 3 +- .../index/mapper/NumberFieldMapperTests.java | 3 +- .../index/mapper/ObjectMapperTests.java | 501 ++++--- .../index/mapper/ParametrizedMapperTests.java 
| 60 +- .../index/mapper/RangeFieldMapperTests.java | 5 +- ...angeFieldQueryStringQueryBuilderTests.java | 31 +- .../index/mapper/RootObjectMapperTests.java | 317 +++-- .../index/mapper/RoutingFieldMapperTests.java | 5 +- .../index/mapper/SourceFieldMapperTests.java | 231 ++-- .../mapper/StoredNumericValuesTests.java | 106 +- .../index/mapper/TextFieldMapperTests.java | 27 +- .../index/mapper/UpdateMappingTests.java | 49 +- .../query/GeoShapeQueryBuilderTests.java | 3 +- .../query/IntervalQueryBuilderTests.java | 3 +- .../index/query/MatchQueryBuilderTests.java | 5 +- .../query/MoreLikeThisQueryBuilderTests.java | 3 +- .../index/query/NestedQueryBuilderTests.java | 39 +- .../query/QueryStringQueryBuilderTests.java | 11 +- .../index/query/RangeQueryRewriteTests.java | 47 +- .../query/SpanFirstQueryBuilderTests.java | 5 +- .../query/SpanMultiTermQueryBuilderTests.java | 3 +- .../index/query/SpanNotQueryBuilderTests.java | 9 +- .../index/query/TermsQueryBuilderTests.java | 14 +- .../query/TermsSetQueryBuilderTests.java | 3 +- .../FunctionScoreQueryBuilderTests.java | 76 +- .../index/reindex/BulkByScrollTaskTests.java | 7 +- .../index/search/MultiMatchQueryTests.java | 50 +- .../index/shard/IndexShardTests.java | 18 +- .../shard/PrimaryReplicaSyncerTests.java | 3 +- .../index/similarity/SimilarityTests.java | 22 +- ...oteStoreShardShallowCopySnapshotTests.java | 3 +- .../index/translog/LocalTranslogTests.java | 3 +- .../nodesinfo/NodeInfoStreamingTests.java | 3 +- .../persistent/TestPersistentTasksPlugin.java | 2 +- .../rest/action/RestActionsTests.java | 3 +- .../org/opensearch/script/ScriptTests.java | 12 +- .../script/StoredScriptSourceTests.java | 3 +- .../opensearch/script/StoredScriptTests.java | 30 +- .../search/NestedIdentityTests.java | 5 +- .../org/opensearch/search/SearchHitTests.java | 3 +- .../opensearch/search/SearchHitsTests.java | 3 +- .../search/SearchSortValuesTests.java | 5 +- .../terms/SignificanceHeuristicTests.java | 3 +- .../metrics/AbstractGeoTestCase.java | 3 +- .../metrics/AbstractPercentilesTestCase.java | 3 +- .../metrics/InternalStatsTests.java | 5 +- .../InternalPercentilesBucketTests.java | 3 +- .../highlight/HighlightFieldTests.java | 5 +- .../search/geo/GeoShapeQueryTests.java | 67 +- .../internal/ShardSearchRequestTests.java | 4 +- .../search/profile/ProfileResultTests.java | 13 +- .../profile/query/CollectorResultTests.java | 11 +- .../search/sort/SortBuilderTests.java | 3 +- .../search/sort/SortValueTests.java | 2 +- .../CategoryContextMappingTests.java | 270 ++-- .../RemoteClusterConnectionTests.java | 8 +- .../index/engine/EngineTestCase.java | 24 +- .../index/mapper/FieldMapperTestCase.java | 3 +- .../index/mapper/FieldMapperTestCase2.java | 5 +- .../index/mapper/MapperTestCase.java | 5 +- .../aggregations/BaseAggregationTestCase.java | 2 +- .../AbstractSnapshotIntegTestCase.java | 4 +- .../AbstractBroadcastResponseTestCase.java | 2 +- .../test/AbstractBuilderTestCase.java | 71 +- .../test/AbstractQueryTestCase.java | 8 +- .../test/rest/OpenSearchRestTestCase.java | 18 +- .../rest/yaml/ClientYamlTestResponse.java | 3 +- .../OpenSearchClientYamlSuiteTestCase.java | 4 +- 350 files changed, 4002 insertions(+), 4410 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/common/Strings.java delete mode 100644 server/src/test/java/org/opensearch/common/StringsTests.java diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java 
b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java index cffed98fce3aa..5f0931d98ee9d 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/CreateIndexRequest.java @@ -38,7 +38,6 @@ import org.opensearch.action.support.ActiveShardCount; import org.opensearch.client.TimedRequest; import org.opensearch.client.Validatable; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.settings.Settings; @@ -135,7 +134,7 @@ public CreateIndexRequest settings(String source, MediaType mediaType) { * Allows to set the settings using a json builder. */ public CreateIndexRequest settings(XContentBuilder builder) { - settings(Strings.toString(builder), builder.contentType()); + settings(builder.toString(), builder.contentType()); return this; } diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicy.java b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicy.java index 3566d6aef1ac3..a6f8bfdee4d68 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicy.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicy.java @@ -33,9 +33,9 @@ package org.opensearch.client.slm; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.ParseField; +import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.ConstructingObjectParser; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicyMetadata.java b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicyMetadata.java index c0b542557159f..0fcb6f7c6a29e 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicyMetadata.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecyclePolicyMetadata.java @@ -33,9 +33,9 @@ package org.opensearch.client.slm; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.ParseField; +import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.ConstructingObjectParser; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecycleStats.java b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecycleStats.java index 2126ced76b5ae..a2e43325ccd6f 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecycleStats.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotLifecycleStats.java @@ -32,10 +32,10 @@ package org.opensearch.client.slm; -import org.opensearch.common.Strings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.ParseField; +import org.opensearch.core.common.Strings; import 
org.opensearch.core.xcontent.ConstructingObjectParser; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.ToXContentObject; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotRetentionConfiguration.java b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotRetentionConfiguration.java index 2a7ab6bb04095..9982c0f2cef7d 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotRetentionConfiguration.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/slm/SnapshotRetentionConfiguration.java @@ -33,10 +33,10 @@ package org.opensearch.client.slm; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.ParseField; +import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.ConstructingObjectParser; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java index dd793fdbb7ff6..2aeb47de150d6 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/BulkProcessorIT.java @@ -42,7 +42,6 @@ import org.opensearch.action.get.MultiGetResponse; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.search.SearchRequest; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.common.unit.ByteSizeValue; @@ -423,7 +422,7 @@ private static BytesArray bytesBulkRequest(String localIndex, int id) throws IOE XContentBuilder source = jsonBuilder().startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject(); - String request = Strings.toString(action) + "\n" + Strings.toString(source) + "\n"; + String request = action + "\n" + source + "\n"; return new BytesArray(request); } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java index fbfab016b1ead..233ea0ca5f48d 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java @@ -52,7 +52,6 @@ import org.opensearch.action.search.SearchScrollRequest; import org.opensearch.client.core.CountRequest; import org.opensearch.client.core.CountResponse; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentBuilder; @@ -769,7 +768,7 @@ public void testSearchScroll() throws Exception { for (int i = 0; i < 100; i++) { XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); Request doc = new Request(HttpPut.METHOD_NAME, "/test/_doc/" + Integer.toString(i)); - doc.setJsonEntity(Strings.toString(builder)); + doc.setJsonEntity(builder.toString()); client().performRequest(doc); } client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh")); @@ -837,7 +836,7 @@ public void testSearchWithPit() throws Exception { for (int i = 0; i < 
100; i++) { XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); Request doc = new Request(HttpPut.METHOD_NAME, "/test/_doc/" + Integer.toString(i)); - doc.setJsonEntity(Strings.toString(builder)); + doc.setJsonEntity(builder.toString()); client().performRequest(doc); } client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh")); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java index 178d9296bd242..010b16a3f5720 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java @@ -74,7 +74,7 @@ import org.opensearch.client.core.TermVectorsResponse; import org.opensearch.client.indices.CreateIndexRequest; import org.opensearch.client.indices.CreateIndexResponse; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeUnit; @@ -298,15 +298,14 @@ public void testUpdate() throws Exception { Request request = new Request("POST", "/_scripts/increment-field"); request.setJsonEntity( - Strings.toString( - JsonXContent.contentBuilder() - .startObject() - .startObject("script") - .field("lang", "painless") - .field("source", "ctx._source.field += params.count") - .endObject() - .endObject() - ) + JsonXContent.contentBuilder() + .startObject() + .startObject("script") + .field("lang", "painless") + .field("source", "ctx._source.field += params.count") + .endObject() + .endObject() + .toString() ); Response response = client().performRequest(request); assertEquals(RestStatus.OK.getStatus(), response.getStatusLine().getStatusCode()); diff --git a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/NodeRestUsageIT.java b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/NodeRestUsageIT.java index d397a1c967ad0..0f21984e5d8b9 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/NodeRestUsageIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/NodeRestUsageIT.java @@ -35,8 +35,8 @@ import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.client.ResponseException; -import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.builder.SearchSourceBuilder; diff --git a/libs/core/src/main/java/org/opensearch/core/common/Strings.java b/libs/core/src/main/java/org/opensearch/core/common/Strings.java index f0ca12a307313..6227716af9cc9 100644 --- a/libs/core/src/main/java/org/opensearch/core/common/Strings.java +++ b/libs/core/src/main/java/org/opensearch/core/common/Strings.java @@ -9,11 +9,17 @@ package org.opensearch.core.common; import org.apache.lucene.util.BytesRefBuilder; +import org.opensearch.ExceptionsHelper; +import org.opensearch.OpenSearchException; import org.opensearch.common.Nullable; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.util.CollectionUtils; +import org.opensearch.core.xcontent.MediaType; +import 
org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import java.io.BufferedReader; +import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; @@ -680,6 +686,77 @@ public static boolean isAllOrWildcard(String data) { return "_all".equals(data) || "*".equals(data); } + /** + * Return a {@link String} that is the json representation of the provided {@link ToXContent}. + * Wraps the output into an anonymous object if needed. The content is not pretty-printed + * nor human readable. + */ + public static String toString(MediaType mediaType, ToXContent toXContent) { + return toString(mediaType, toXContent, false, false); + } + + /** + * Return a {@link String} that is the json representation of the provided {@link ToXContent}. + * Wraps the output into an anonymous object if needed. + * Allows to configure the params. + * The content is not pretty-printed nor human readable. + */ + public static String toString(MediaType mediaType, ToXContent toXContent, ToXContent.Params params) { + return toString(mediaType, toXContent, params, false, false); + } + + /** + * Return a {@link String} that is the json representation of the provided {@link ToXContent}. + * Wraps the output into an anonymous object if needed. Allows to control whether the outputted + * json needs to be pretty printed and human readable. + * + */ + public static String toString(MediaType mediaType, ToXContent toXContent, boolean pretty, boolean human) { + return toString(mediaType, toXContent, ToXContent.EMPTY_PARAMS, pretty, human); + } + + /** + * Return a {@link String} that is the json representation of the provided {@link ToXContent}. + * Wraps the output into an anonymous object if needed. + * Allows to configure the params. + * Allows to control whether the outputted json needs to be pretty printed and human readable. + */ + private static String toString(MediaType mediaType, ToXContent toXContent, ToXContent.Params params, boolean pretty, boolean human) { + try { + XContentBuilder builder = createBuilder(mediaType, pretty, human); + if (toXContent.isFragment()) { + builder.startObject(); + } + toXContent.toXContent(builder, params); + if (toXContent.isFragment()) { + builder.endObject(); + } + return builder.toString(); + } catch (IOException e) { + try { + XContentBuilder builder = createBuilder(mediaType, pretty, human); + builder.startObject(); + builder.field("error", "error building toString out of XContent: " + e.getMessage()); + builder.field("stack_trace", ExceptionsHelper.stackTrace(e)); + builder.endObject(); + return builder.toString(); + } catch (IOException e2) { + throw new OpenSearchException("cannot generate error message for deserialization", e); + } + } + } + + private static XContentBuilder createBuilder(MediaType mediaType, boolean pretty, boolean human) throws IOException { + XContentBuilder builder = XContentBuilder.builder(mediaType.xContent()); + if (pretty) { + builder.prettyPrint(); + } + if (human) { + builder.humanReadable(true); + } + return builder; + } + /** * Truncates string to a length less than length. Backtracks to throw out * high surrogates. 
diff --git a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java index 061837d27ed0a..dfd1449ef0e0b 100644 --- a/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java +++ b/libs/core/src/main/java/org/opensearch/core/xcontent/XContentBuilder.java @@ -32,6 +32,8 @@ package org.opensearch.core.xcontent; +import org.opensearch.core.common.bytes.BytesReference; + import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.Flushable; @@ -151,6 +153,14 @@ public static XContentBuilder builder(XContent xContent, Set includes, S DATE_TRANSFORMERS = Collections.unmodifiableMap(dateTransformers); } + /** + * Returns a string representation of the builder (only applicable for text based xcontent). + */ + @Override + public String toString() { + return BytesReference.bytes(this).utf8ToString(); + } + /** * The writer interface for the serializable content builder * diff --git a/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java b/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java index d037b062e95d0..9801e9cbcdb44 100644 --- a/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java +++ b/libs/core/src/test/java/org/opensearch/core/action/support/DefaultShardOperationFailedExceptionTests.java @@ -39,7 +39,7 @@ import org.apache.lucene.store.LockObtainFailedException; import org.opensearch.OpenSearchException; import org.opensearch.action.support.broadcast.BroadcastShardOperationFailedException; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; diff --git a/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java b/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java index ca0bd788c1991..b79bb6fc89f9e 100644 --- a/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java +++ b/libs/core/src/test/java/org/opensearch/core/common/StringsTests.java @@ -9,8 +9,14 @@ package org.opensearch.core.common; import org.opensearch.common.util.set.Sets; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.test.OpenSearchTestCase; +import java.util.Collections; + +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; /** tests for Strings utility class */ @@ -63,4 +69,49 @@ public void testSplitStringToSet() { assertEquals(Strings.tokenizeByCommaToSet(" aa "), Sets.newHashSet("aa")); assertEquals(Strings.tokenizeByCommaToSet(" "), Sets.newHashSet()); } + + public void testToStringToXContent() { + final ToXContent toXContent; + final boolean error; + if (randomBoolean()) { + if (randomBoolean()) { + error = false; + toXContent = (builder, params) -> builder.field("ok", "here").field("catastrophe", ""); + } else { + error = true; + toXContent = (builder, params) -> builder.startObject().field("ok", "here").field("catastrophe", "").endObject(); + } + } else { + if (randomBoolean()) { + error = false; + toXContent = (ToXContentObject) (builder, params) -> builder.startObject() + .field("ok", "here") + .field("catastrophe", "") + 
.endObject(); + } else { + error = true; + toXContent = (ToXContentObject) (builder, params) -> builder.field("ok", "here").field("catastrophe", ""); + } + } + + String toString = Strings.toString(MediaTypeRegistry.JSON, toXContent); + if (error) { + assertThat(toString, containsString("\"error\":\"error building toString out of XContent:")); + assertThat(toString, containsString("\"stack_trace\":")); + } else { + assertThat(toString, containsString("\"ok\":\"here\"")); + assertThat(toString, containsString("\"catastrophe\":\"\"")); + } + } + + public void testToStringToXContentWithOrWithoutParams() { + ToXContent toXContent = (builder, params) -> builder.field("color_from_param", params.param("color", "red")); + // Rely on the default value of "color" param when params are not passed + assertThat(Strings.toString(MediaTypeRegistry.JSON, toXContent), containsString("\"color_from_param\":\"red\"")); + // Pass "color" param explicitly + assertThat( + Strings.toString(MediaTypeRegistry.JSON, toXContent, new ToXContent.MapParams(Collections.singletonMap("color", "blue"))), + containsString("\"color_from_param\":\"blue\"") + ); + } } diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java index cd59bf59fe15d..6e7de4aa6bfe5 100644 --- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/ObjectParserTests.java @@ -33,7 +33,6 @@ import org.opensearch.common.CheckedFunction; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentParserUtils; import org.opensearch.core.xcontent.ObjectParser; import org.opensearch.core.xcontent.ObjectParser.NamedObjectParser; @@ -449,7 +448,7 @@ public void testAllVariants() throws IOException { } builder.field("string_or_null", nullValue ? 
null : "5"); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder)); + XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString()); class TestStruct { int int_field; int nullableIntField; diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java index a4aca80918284..1d2a66ea1f78f 100644 --- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java +++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/SimpleStruct.java @@ -33,7 +33,7 @@ package org.opensearch.common.xcontent; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.ConstructingObjectParser; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java index 3552514af0aa8..fab61753eb739 100644 --- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java @@ -37,7 +37,6 @@ import com.fasterxml.jackson.dataformat.yaml.JacksonYAMLParseException; import org.opensearch.common.CheckedSupplier; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.cbor.CborXContent; import org.opensearch.common.xcontent.json.JsonXContent; @@ -366,7 +365,7 @@ public void testReadBooleans() throws IOException { public void testEmptyList() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startArray("some_array").endArray().endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString())) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); @@ -388,7 +387,7 @@ public void testSimpleList() throws IOException { .endArray() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString())) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); @@ -416,7 +415,7 @@ public void testNestedList() throws IOException { .endArray() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, builder.toString())) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); @@ -440,7 +439,7 @@ public void testNestedMapInList() throws IOException { .endArray() .endObject(); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { + try (XContentParser 
parser = createParser(JsonXContent.jsonXContent, builder.toString())) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals("some_array", parser.currentName()); @@ -516,7 +515,7 @@ public void testSubParserObject() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); int numberOfTokens; numberOfTokens = generateRandomObjectForMarking(builder); - String content = Strings.toString(builder); + String content = builder.toString(); try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); @@ -564,7 +563,7 @@ public void testSubParserArray() throws IOException { builder.endArray(); builder.endObject(); - String content = Strings.toString(builder); + String content = builder.toString(); try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); @@ -597,7 +596,7 @@ public void testSubParserArray() throws IOException { public void testCreateSubParserAtAWrongPlace() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); generateRandomObjectForMarking(builder); - String content = Strings.toString(builder); + String content = builder.toString(); try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); @@ -611,7 +610,7 @@ public void testCreateSubParserAtAWrongPlace() throws IOException { public void testCreateRootSubParser() throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); int numberOfTokens = generateRandomObjectForMarking(builder); - String content = Strings.toString(builder); + String content = builder.toString(); try (XContentParser parser = createParser(JsonXContent.jsonXContent, content)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java index f7c9747e1a163..5df5912daf461 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java @@ -10,7 +10,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.common.document.DocumentField; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; @@ -256,7 +255,7 @@ public void setupSuiteScopeCluster() throws Exception { long totalHits = response.getHits().getTotalHits().value; XContentBuilder builder = XContentFactory.jsonBuilder(); response.toXContent(builder, ToXContent.EMPTY_PARAMS); - logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); + logger.info("Full high_card_idx Response Content:\n{ {} }", builder.toString()); for (int i = 0; i < totalHits; i++) { SearchHit searchHit = response.getHits().getAt(i); assertThat("Hit " + i + " with id: " + searchHit.getId(), searchHit.getIndex(), 
equalTo("high_card_idx")); diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java index fbb8ebdec384c..bb11e493ba3d1 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java @@ -34,7 +34,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchRequest; -import org.opensearch.common.Strings; import org.opensearch.index.IndexNotFoundException; import org.opensearch.plugins.Plugin; import org.opensearch.script.ScriptType; @@ -72,15 +71,14 @@ public void testBasic() throws Exception { } indexRandom(true, indexRequestBuilders); - final String template = Strings.toString( - jsonBuilder().startObject() - .startObject("query") - .startObject("{{query_type}}") - .field("{{field_name}}", "{{field_value}}") - .endObject() - .endObject() - .endObject() - ); + final String template = jsonBuilder().startObject() + .startObject("query") + .startObject("{{query_type}}") + .field("{{field_name}}", "{{field_value}}") + .endObject() + .endObject() + .endObject() + .toString(); MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java index a4be60d2b6900..0cf1ed525fbfe 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/CustomMustacheFactory.java @@ -44,7 +44,6 @@ import com.github.mustachejava.codes.IterableCode; import com.github.mustachejava.codes.WriteCode; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; @@ -228,7 +227,7 @@ protected Function createFunction(Object resolved) { // Do not handle as JSON return oh.stringify(resolved); } - return Strings.toString(builder); + return builder.toString(); } catch (IOException e) { throw new MustacheException("Failed to convert object to JSON", e); } diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java index f31e5be078a28..49f5d4194d446 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java @@ -36,7 +36,7 @@ import org.opensearch.action.ActionResponse; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java index 
c963ea7ba7da9..166778bd02cee 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java @@ -38,13 +38,12 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.core.ParseField; import org.opensearch.core.common.ParsingException; -import org.opensearch.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ObjectParser; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.script.ScriptType; @@ -207,8 +206,8 @@ public ActionRequestValidationException validate() { request.setScriptType(ScriptType.INLINE); if (parser.currentToken() == XContentParser.Token.START_OBJECT) { // convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - request.setScript(Strings.toString(builder.copyCurrentStructure(parser))); + try (XContentBuilder builder = MediaTypeRegistry.JSON.contentBuilder()) { + request.setScript(builder.copyCurrentStructure(parser).toString()); } catch (IOException e) { throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e); } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java index 6412059075e5c..fee9471444c19 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureFieldMapperTests.java @@ -38,7 +38,6 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.plugins.Plugin; @@ -91,7 +90,7 @@ protected void minimalMapping(XContentBuilder b) throws IOException { public void testDefaults() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); + assertEquals(fieldMapping(this::minimalMapping).toString(), mapper.mappingSource().toString()); ParsedDocument doc1 = mapper.parse(source(b -> b.field("field", 10))); IndexableField[] fields = doc1.rootDoc().getFields("_feature"); diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java index 63b1b4760b6fe..d35368350592c 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java @@ -32,11 +32,10 @@ package org.opensearch.index.mapper; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import 
org.opensearch.common.compress.CompressedXContent; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.IndexService; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; @@ -61,18 +60,17 @@ protected Collection> getPlugins() { } public void testBasics() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "rank_feature") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = MediaTypeRegistry.JSON.contentBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "rank_feature") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -85,10 +83,12 @@ public void testBasics() throws Exception { * and parsing of a document fails if the document contains these meta-fields. */ public void testDocumentParsingFailsOnMetaField() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()); + String mapping = MediaTypeRegistry.JSON.contentBuilder().startObject().startObject("_doc").endObject().endObject().toString(); DocumentMapper mapper = parser.parse("_doc", new CompressedXContent(mapping)); String rfMetaField = RankFeatureMetaFieldMapper.CONTENT_TYPE; - BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject()); + BytesReference bytes = BytesReference.bytes( + MediaTypeRegistry.JSON.contentBuilder().startObject().field(rfMetaField, 0).endObject() + ); MapperParsingException e = expectThrows( MapperParsingException.class, () -> mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON)) diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java index 6c844bae73da4..b95572835e612 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java @@ -34,7 +34,6 @@ import org.apache.lucene.document.FeatureField; import org.apache.lucene.index.IndexableField; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.plugins.Plugin; import org.hamcrest.Matchers; @@ -79,7 +78,7 @@ protected boolean supportsMeta() { public void testDefaults() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); + assertEquals(fieldMapping(this::minimalMapping).toString(), mapper.mappingSource().toString()); ParsedDocument doc1 = mapper.parse(source(this::writeField)); diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java index 390f10c0684bd..4de11d7f64e8e 100644 --- 
a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -34,7 +34,6 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -95,7 +94,7 @@ public void testExistsQueryDocValuesDisabled() throws IOException { public void testDefaults() throws Exception { XContentBuilder mapping = fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0)); DocumentMapper mapper = createDocumentMapper(mapping); - assertEquals(Strings.toString(mapping), mapper.mappingSource().toString()); + assertEquals(mapping.toString(), mapper.mappingSource().toString()); ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123))); IndexableField[] fields = doc.rootDoc().getFields("field"); diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java index 5e67aaa2ed246..551bd38b65f59 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java @@ -50,9 +50,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.opensearch.common.Strings; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.index.analysis.AnalyzerScope; diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java index f57aac8a244b7..a8d672c025af0 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java @@ -36,7 +36,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.index.mapper.MapperExtrasModulePlugin; import org.opensearch.index.mapper.MapperService; @@ -61,16 +60,14 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws mapperService.merge( "_doc", new CompressedXContent( - Strings.toString( - PutMappingRequest.simpleMapping( - "my_feature_field", - "type=rank_feature", - "my_negative_feature_field", - "type=rank_feature,positive_score_impact=false", - "my_feature_vector_field", - "type=rank_features" - ) - ) + PutMappingRequest.simpleMapping( + "my_feature_field", + "type=rank_feature", + "my_negative_feature_field", + "type=rank_feature,positive_score_impact=false", + "my_feature_vector_field", + "type=rank_features" + ).toString() ), 
MapperService.MergeReason.MAPPING_UPDATE ); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java index 34a6af6ee3639..9b7ad4425bd5e 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java @@ -32,7 +32,6 @@ package org.opensearch.join.query; import org.opensearch.action.index.IndexRequestBuilder; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentHelper; @@ -85,7 +84,7 @@ protected IndexRequestBuilder createIndexRequest(String index, String type, Stri protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, XContentBuilder builder) throws IOException { - Map source = XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(builder), false); + Map source = XContentHelper.convertToMap(JsonXContent.jsonXContent, builder.toString(), false); return createIndexRequest(index, type, id, parentId, source); } diff --git a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java index cd8f18b679750..213ba43ee34cd 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java @@ -32,11 +32,10 @@ package org.opensearch.join.mapper; -import org.opensearch.common.Strings; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.index.IndexService; import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.MapperException; @@ -60,19 +59,18 @@ protected Collection> getPlugins() { } public void testSingleLevel() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService() .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); @@ -133,19 +131,18 @@ public void testSingleLevel() throws Exception { } public void testParentIdSpecifiedAsNumber() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .endObject() - .endObject() - 
.endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService() .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); @@ -190,20 +187,19 @@ public void testParentIdSpecifiedAsNumber() throws Exception { } public void testMultipleLevels() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .field("child", "grand_child") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .field("child", "grand_child") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService() .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); @@ -323,39 +319,37 @@ public void testMultipleLevels() throws Exception { } public void testUpdateRelations() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .array("child", "grand_child1", "grand_child2") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); + IndexService indexService = createIndex("test"); + DocumentMapper docMapper = indexService.mapperService() + .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); + + { + final String updateMapping = XContentFactory.jsonBuilder() .startObject() .startObject("properties") .startObject("join_field") .field("type", "join") .startObject("relations") - .field("parent", "child") .array("child", "grand_child1", "grand_child2") .endObject() .endObject() .endObject() .endObject() - ); - IndexService indexService = createIndex("test"); - DocumentMapper docMapper = indexService.mapperService() - .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); - - { - final String updateMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .array("child", "grand_child1", "grand_child2") - .endObject() - .endObject() - .endObject() - .endObject() - ); + .toString(); IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, () -> indexService.mapperService() @@ -365,20 +359,19 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = Strings.toString( - 
XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .field("child", "grand_child1") - .endObject() - .endObject() - .endObject() - .endObject() - ); + final String updateMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .field("child", "grand_child1") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, () -> indexService.mapperService() @@ -388,21 +381,20 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("uber_parent", "parent") - .field("parent", "child") - .array("child", "grand_child1", "grand_child2") - .endObject() - .endObject() - .endObject() - .endObject() - ); + final String updateMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("uber_parent", "parent") + .field("parent", "child") + .array("child", "grand_child1", "grand_child2") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, () -> indexService.mapperService() @@ -412,21 +404,20 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .array("child", "grand_child1", "grand_child2") - .field("grand_child2", "grand_grand_child") - .endObject() - .endObject() - .endObject() - .endObject() - ); + final String updateMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .array("child", "grand_child1", "grand_child2") + .field("grand_child2", "grand_grand_child") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, () -> indexService.mapperService() @@ -436,20 +427,19 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .array("parent", "child", "child2") - .array("child", "grand_child1", "grand_child2") - .endObject() - .endObject() - .endObject() - .endObject() - ); + final String updateMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .array("parent", "child", "child2") + .array("child", "grand_child1", "grand_child2") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); docMapper = indexService.mapperService() .merge("type", new CompressedXContent(updateMapping), 
MapperService.MergeReason.MAPPING_UPDATE); ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService()); @@ -462,21 +452,20 @@ public void testUpdateRelations() throws Exception { } { - final String updateMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .array("parent", "child", "child2") - .array("child", "grand_child1", "grand_child2") - .array("other", "child_other1", "child_other2") - .endObject() - .endObject() - .endObject() - .endObject() - ); + final String updateMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .array("parent", "child", "child2") + .array("child", "grand_child1", "grand_child2") + .array("other", "child_other1", "child_other2") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); docMapper = indexService.mapperService() .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE); ParentJoinFieldMapper mapper = ParentJoinFieldMapper.getMapper(indexService.mapperService()); @@ -492,23 +481,22 @@ public void testUpdateRelations() throws Exception { } public void testInvalidJoinFieldInsideObject() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("object") - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("object") + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IndexService indexService = createIndex("test"); MapperParsingException exc = expectThrows( MapperParsingException.class, @@ -521,24 +509,23 @@ public void testInvalidJoinFieldInsideObject() throws Exception { } public void testInvalidJoinFieldInsideMultiFields() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("number") - .field("type", "integer") - .startObject("fields") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("number") + .field("type", "integer") + .startObject("fields") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); IndexService indexService = createIndex("test"); MapperParsingException exc = expectThrows( MapperParsingException.class, @@ -553,26 +540,25 @@ public void testInvalidJoinFieldInsideMultiFields() throws Exception { public void testMultipleJoinFields() throws Exception { IndexService indexService = createIndex("test"); { - String mapping = 
Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .field("child", "grand_child") - .endObject() - .endObject() - .startObject("another_join_field") - .field("type", "join") - .startObject("relations") - .field("product", "item") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .field("child", "grand_child") + .endObject() + .endObject() + .startObject("another_join_field") + .field("type", "join") + .startObject("relations") + .field("product", "item") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); MapperParsingException exc = expectThrows( MapperParsingException.class, () -> indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE) @@ -581,43 +567,7 @@ public void testMultipleJoinFields() throws Exception { } { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .startObject("relations") - .field("parent", "child") - .field("child", "grand_child") - .endObject() - .endObject() - .endObject() - .endObject() - ); - indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - String updateMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("another_join_field") - .field("type", "join") - .endObject() - .endObject() - .endObject() - ); - MapperParsingException exc = expectThrows( - MapperParsingException.class, - () -> indexService.mapperService() - .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE) - ); - assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once")); - } - } - - public void testEagerGlobalOrdinals() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() + String mapping = XContentFactory.jsonBuilder() .startObject() .startObject("properties") .startObject("join_field") @@ -629,7 +579,40 @@ public void testEagerGlobalOrdinals() throws Exception { .endObject() .endObject() .endObject() - ); + .toString(); + indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); + String updateMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("another_join_field") + .field("type", "join") + .endObject() + .endObject() + .endObject() + .toString(); + MapperParsingException exc = expectThrows( + MapperParsingException.class, + () -> indexService.mapperService() + .merge("type", new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE) + ); + assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined more than once")); + } + } + + public void testEagerGlobalOrdinals() throws Exception { + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .startObject("relations") + .field("parent", "child") + .field("child", "grand_child") + 
.endObject() + .endObject() + .endObject() + .endObject() + .toString(); IndexService service = createIndex("test"); DocumentMapper docMapper = service.mapperService() .merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); @@ -640,21 +623,20 @@ public void testEagerGlobalOrdinals() throws Exception { assertNotNull(service.mapperService().fieldType("join_field#child")); assertTrue(service.mapperService().fieldType("join_field#child").eagerGlobalOrdinals()); - mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("join_field") - .field("type", "join") - .field("eager_global_ordinals", false) - .startObject("relations") - .field("parent", "child") - .field("child", "grand_child") - .endObject() - .endObject() - .endObject() - .endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("join_field") + .field("type", "join") + .field("eager_global_ordinals", false) + .startObject("relations") + .field("parent", "child") + .field("child", "grand_child") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); service.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); assertFalse(service.mapperService().fieldType("join_field").eagerGlobalOrdinals()); assertNotNull(service.mapperService().fieldType("join_field#parent")); diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java index 6610b103509b0..96220c247d909 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasChildQueryBuilderTests.java @@ -46,7 +46,6 @@ import org.apache.lucene.search.similarities.Similarity; import org.opensearch.OpenSearchException; import org.opensearch.Version; -import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.XContentBuilder; @@ -141,7 +140,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .endObject() .endObject(); - mapperService.merge(TYPE, new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(TYPE, new CompressedXContent(mapping.toString()), MapperService.MergeReason.MAPPING_UPDATE); } /** diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java index 9921b6b040901..63af6873e14af 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/query/HasParentQueryBuilderTests.java @@ -37,7 +37,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.opensearch.OpenSearchException; import org.opensearch.Version; -import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperService; @@ -122,7 +121,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .endObject() .endObject(); - mapperService.merge(TYPE, 
new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(TYPE, new CompressedXContent(mapping.toString()), MapperService.MergeReason.MAPPING_UPDATE); } /** diff --git a/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java index 88da6a6953d1e..ec555448fd218 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/query/ParentIdQueryBuilderTests.java @@ -39,7 +39,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.opensearch.OpenSearchException; -import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperService; @@ -111,7 +110,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .endObject() .endObject(); - mapperService.merge(TYPE, new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(TYPE, new CompressedXContent(mapping.toString()), MapperService.MergeReason.MAPPING_UPDATE); } @Override diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 1040d014483e1..9f80069b99c10 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -94,7 +94,6 @@ import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.CheckedFunction; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; @@ -163,51 +162,49 @@ public void init() throws Exception { indexService = createIndex(indexName, Settings.EMPTY); mapperService = indexService.mapperService(); - String mapper = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("int_field") - .field("type", "integer") - .endObject() - .startObject("long_field") - .field("type", "long") - .endObject() - .startObject("half_float_field") - .field("type", "half_float") - .endObject() - .startObject("float_field") - .field("type", "float") - .endObject() - .startObject("double_field") - .field("type", "double") - .endObject() - .startObject("ip_field") - .field("type", "ip") - .endObject() - .startObject("field") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapper = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject("int_field") + .field("type", "integer") + .endObject() + .startObject("long_field") + .field("type", "long") + .endObject() + .startObject("half_float_field") + .field("type", "half_float") + .endObject() + .startObject("float_field") + .field("type", "float") + .endObject() + .startObject("double_field") + .field("type", "double") + .endObject() + .startObject("ip_field") + .field("type", "ip") + .endObject() + .startObject("field") + 
.field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); String queryField = "query_field"; - String percolatorMapper = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject(queryField) - .field("type", "percolator") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String percolatorMapper = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("properties") + .startObject(queryField) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper().mappers().getMapper(queryField); fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType(); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java index 34ed195cd0f23..93b78d67bf6f1 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java @@ -38,7 +38,6 @@ import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; @@ -109,15 +108,13 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws mapperService.merge( docType, new CompressedXContent( - Strings.toString( - PutMappingRequest.simpleMapping(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField) - ) + PutMappingRequest.simpleMapping(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField).toString() ), MapperService.MergeReason.MAPPING_UPDATE ); mapperService.merge( docType, - new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(TEXT_FIELD_NAME, "type=text"))), + new CompressedXContent(PutMappingRequest.simpleMapping(TEXT_FIELD_NAME, "type=text").toString()), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java index 748b79d70af07..677d169162c74 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java @@ -33,7 +33,6 @@ package org.opensearch.percolator; import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.xcontent.XContentType; @@ -50,7 +49,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws 
super.initializeAdditionalMappings(mapperService); mapperService.merge( "_doc", - new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping("some_nested_object", "type=nested"))), + new CompressedXContent(PutMappingRequest.simpleMapping("some_nested_object", "type=nested").toString()), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index 77a4718b1d755..c5e2a1f68de9c 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -56,7 +56,6 @@ import org.opensearch.Version; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; @@ -163,67 +162,65 @@ public void init() throws Exception { indexService = createIndex("test"); mapperService = indexService.mapperService(); - String mapper = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("field") - .field("type", "text") - .endObject() - .startObject("field1") - .field("type", "text") - .endObject() - .startObject("field2") - .field("type", "text") - .endObject() - .startObject("_field3") - .field("type", "text") - .endObject() - .startObject("field4") - .field("type", "text") - .endObject() - .startObject("number_field1") - .field("type", "integer") - .endObject() - .startObject("number_field2") - .field("type", "long") - .endObject() - .startObject("number_field3") - .field("type", "long") - .endObject() - .startObject("number_field4") - .field("type", "half_float") - .endObject() - .startObject("number_field5") - .field("type", "float") - .endObject() - .startObject("number_field6") - .field("type", "double") - .endObject() - .startObject("number_field7") - .field("type", "ip") - .endObject() - .startObject("date_field") - .field("type", "date") - .endObject() - .endObject() - .endObject() - ); + String mapper = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("field") + .field("type", "text") + .endObject() + .startObject("field1") + .field("type", "text") + .endObject() + .startObject("field2") + .field("type", "text") + .endObject() + .startObject("_field3") + .field("type", "text") + .endObject() + .startObject("field4") + .field("type", "text") + .endObject() + .startObject("number_field1") + .field("type", "integer") + .endObject() + .startObject("number_field2") + .field("type", "long") + .endObject() + .startObject("number_field3") + .field("type", "long") + .endObject() + .startObject("number_field4") + .field("type", "half_float") + .endObject() + .startObject("number_field5") + .field("type", "float") + .endObject() + .startObject("number_field6") + .field("type", "double") + .endObject() + .startObject("number_field7") + .field("type", "ip") + .endObject() + .startObject("date_field") + .field("type", "date") + .endObject() + .endObject() + .endObject() + .toString(); mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws 
Exception { fieldName = randomAlphaOfLength(4); - String percolatorMapper = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject(fieldName) - .field("type", "percolator") - .endObject() - .endObject() - .endObject() - ); + String percolatorMapper = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject(fieldName) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .toString(); mapperService.merge( MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(percolatorMapper), @@ -710,17 +707,16 @@ public void testAllowNoAdditionalSettings() throws Exception { IndexService indexService = createIndex("test1", Settings.EMPTY); MapperService mapperService = indexService.mapperService(); - String percolatorMapper = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject(fieldName) - .field("type", "percolator") - .field("index", "no") - .endObject() - .endObject() - .endObject() - ); + String percolatorMapper = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject(fieldName) + .field("type", "percolator") + .field("index", "no") + .endObject() + .endObject() + .endObject() + .toString(); MapperParsingException e = expectThrows( MapperParsingException.class, () -> mapperService.merge( @@ -735,21 +731,20 @@ public void testAllowNoAdditionalSettings() throws Exception { // multiple percolator fields are allowed in the mapping, but only one field can be used at index time. public void testMultiplePercolatorFields() throws Exception { String typeName = MapperService.SINGLE_MAPPING_NAME; - String percolatorMapper = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(typeName) - .startObject("properties") - .startObject("query_field1") - .field("type", "percolator") - .endObject() - .startObject("query_field2") - .field("type", "percolator") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String percolatorMapper = XContentFactory.jsonBuilder() + .startObject() + .startObject(typeName) + .startObject("properties") + .startObject("query_field1") + .field("type", "percolator") + .endObject() + .startObject("query_field2") + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); @@ -775,23 +770,22 @@ public void testMultiplePercolatorFields() throws Exception { // percolator field can be nested under an object field, but only one query can be specified per document public void testNestedPercolatorField() throws Exception { String typeName = MapperService.SINGLE_MAPPING_NAME; - String percolatorMapper = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(typeName) - .startObject("properties") - .startObject("object_field") - .field("type", "object") - .startObject("properties") - .startObject("query_field") - .field("type", "percolator") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); + String percolatorMapper = XContentFactory.jsonBuilder() + .startObject() + .startObject(typeName) + .startObject("properties") + .startObject("object_field") + .field("type", "object") + .startObject("properties") + .startObject("query_field") + .field("type", "percolator") + 
.endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); @@ -907,18 +901,17 @@ private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws I } public void testEmptyName() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("properties") - .startObject("") - .field("type", "percolator") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("") + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); DocumentMapperParser parser = mapperService.documentMapperParser(); IllegalArgumentException e = expectThrows( @@ -951,7 +944,7 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { BytesReference.bytes( XContentFactory.jsonBuilder() .startObject() - .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) + .rawField(fieldName, new BytesArray(query.toString()).streamInput(), query.contentType()) .endObject() ), XContentType.JSON @@ -998,7 +991,7 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { BytesReference.bytes( XContentFactory.jsonBuilder() .startObject() - .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType()) + .rawField(fieldName, new BytesArray(query.toString()).streamInput(), query.contentType()) .endObject() ), XContentType.JSON diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java index 4dbc348fe458e..68c61183a4486 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalResponse.java @@ -35,8 +35,8 @@ import org.opensearch.OpenSearchException; import org.opensearch.action.ActionResponse; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; import org.opensearch.common.collect.Tuple; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.ConstructingObjectParser; diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java index 9585e79e69cf1..44eeceb117794 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalSpec.java @@ -34,7 +34,7 @@ import org.opensearch.core.ParseField; import org.opensearch.core.common.ParsingException; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java 
b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java index 02ac9182c4f35..e91b8671d0804 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedDocument.java @@ -129,7 +129,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public String toString() { - return org.opensearch.common.Strings.toString(XContentType.JSON, this); + return Strings.toString(XContentType.JSON, this); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java index c5f899cbefdf6..78c2dbd33182f 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RatedRequest.java @@ -33,7 +33,7 @@ package org.opensearch.index.rankeval; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java index d97d5a3a7dcd5..edda52c57f280 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -33,7 +33,7 @@ package org.opensearch.index.rankeval; import org.opensearch.action.OriginalIndices; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.MediaTypeRegistry; diff --git a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java index c42c0722e0fae..5601c972375f1 100644 --- a/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java +++ b/modules/rank-eval/src/test/java/org/opensearch/index/rankeval/RankEvalSpecTests.java @@ -32,7 +32,6 @@ package org.opensearch.index.rankeval; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; @@ -103,7 +102,7 @@ static RankEvalSpec createTestItem() { builder.startObject(); builder.field("field", randomAlphaOfLengthBetween(1, 5)); builder.endObject(); - script = Strings.toString(builder); + script = builder.toString(); } catch (IOException e) { // this shouldn't happen in tests, re-throw just not to swallow it throw new RuntimeException(e); diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java index 511c44ae3c2b6..68c30b616adfd 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java +++ 
b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java @@ -38,7 +38,6 @@ import org.opensearch.Version; import org.opensearch.action.search.SearchRequest; import org.opensearch.client.Request; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.unit.TimeValue; @@ -181,7 +180,7 @@ static Request initialSearch(SearchRequest searchRequest, BytesReference query, } entity.endObject(); - request.setJsonEntity(Strings.toString(entity)); + request.setJsonEntity(entity.toString()); } catch (IOException e) { throw new OpenSearchException("unexpected error building entity", e); } @@ -246,7 +245,7 @@ static Request scroll(String scroll, TimeValue keepAlive, Version remoteVersion) try (XContentBuilder entity = JsonXContent.contentBuilder()) { entity.startObject().field("scroll_id", scroll).endObject(); - request.setJsonEntity(Strings.toString(entity)); + request.setJsonEntity(entity.toString()); } catch (IOException e) { throw new OpenSearchException("failed to build scroll entity", e); } @@ -263,7 +262,7 @@ static Request clearScroll(String scroll, Version remoteVersion) { } try (XContentBuilder entity = JsonXContent.contentBuilder()) { entity.startObject().array("scroll_id", scroll).endObject(); - request.setJsonEntity(Strings.toString(entity)); + request.setJsonEntity(entity.toString()); } catch (IOException e) { throw new OpenSearchException("failed to build clear scroll entity", e); } diff --git a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java index 705cbafd1bd3a..16c4ddf6aaabf 100644 --- a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java +++ b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java @@ -36,7 +36,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.opensearch.client.Request; import org.opensearch.client.Response; -import org.opensearch.common.Strings; import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.ToXContent; @@ -144,7 +143,7 @@ private static HttpEntity buildRepositorySettings(final String type, final Setti builder.endObject(); } builder.endObject(); - return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); + return new StringEntity(builder.toString(), ContentType.APPLICATION_JSON); } } } diff --git a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java index 4b962401387b7..b4b15c22258de 100644 --- a/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/javaRestTest/java/org/opensearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -34,7 +34,6 @@ import org.opensearch.client.Request; import org.opensearch.client.Response; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.rest.OpenSearchRestTestCase; import org.hamcrest.Matcher; @@ -68,7 +67,7 @@ private void createTestDoc(final String 
indexName) throws IOException { } builder.endObject(); Request request = new Request("PUT", "/" + indexName + "/_doc/" + "1"); - request.setJsonEntity(Strings.toString(builder)); + request.setJsonEntity(builder.toString()); client().performRequest(request); } } @@ -109,7 +108,7 @@ public void testAliasExists() throws IOException { builder.endObject(); Request request = new Request("POST", "/_aliases"); - request.setJsonEntity(Strings.toString(builder)); + request.setJsonEntity(builder.toString()); client().performRequest(request); headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0)); headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0)); @@ -136,7 +135,7 @@ public void testTemplateExists() throws IOException { builder.endObject(); Request request = new Request("PUT", "/_template/template"); - request.setJsonEntity(Strings.toString(builder)); + request.setJsonEntity(builder.toString()); client().performRequest(request); headTestCase("/_template/template", emptyMap(), greaterThan(0)); } @@ -163,7 +162,7 @@ public void testGetSourceAction() throws IOException { builder.endObject(); Request request = new Request("PUT", "/test-no-source"); - request.setJsonEntity(Strings.toString(builder)); + request.setJsonEntity(builder.toString()); client().performRequest(request); createTestDoc("test-no-source"); headTestCase("/test-no-source/_source/1", emptyMap(), NOT_FOUND.getStatus(), greaterThan(0)); diff --git a/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java b/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java index 37cb73e21b5d4..0a2f48f4215cb 100644 --- a/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java +++ b/plugins/analysis-icu/src/test/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperTests.java @@ -39,7 +39,6 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.plugin.analysis.icu.AnalysisICUPlugin; import org.opensearch.plugins.Plugin; @@ -96,7 +95,7 @@ protected void writeFieldValue(XContentBuilder builder) throws IOException { public void testDefaults() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); + assertEquals(fieldMapping(this::minimalMapping).toString(), mapper.mappingSource().toString()); ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); IndexableField[] fields = doc.rootDoc().getFields("field"); diff --git a/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java b/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java index f588316fdc32f..db23944b25369 100644 --- a/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java +++ b/plugins/discovery-gce/qa/gce/src/yamlRestTest/java/org/opensearch/cloud/gce/GCEFixture.java @@ -33,7 +33,6 @@ import org.apache.http.client.methods.HttpGet; -import org.opensearch.common.Strings; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.path.PathTrie; @@ -139,13 +138,12 @@ private PathTrie 
defaultHandlers() { handlers.insert( nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/instance/service-accounts/default/token"), request -> jsonValue.apply( - Strings.toString( - jsonBuilder().startObject() - .field("access_token", TOKEN) - .field("expires_in", TimeUnit.HOURS.toSeconds(1)) - .field("token_type", TOKEN_TYPE) - .endObject() - ) + jsonBuilder().startObject() + .field("access_token", TOKEN) + .field("expires_in", TimeUnit.HOURS.toSeconds(1)) + .field("token_type", TOKEN_TYPE) + .endObject() + .toString() ) ); @@ -179,9 +177,7 @@ private PathTrie defaultHandlers() { ); } - final String json = Strings.toString( - jsonBuilder().startObject().field("id", "test-instances").field("items", items).endObject() - ); + final String json = jsonBuilder().startObject().field("id", "test-instances").field("items", items).endObject().toString(); final byte[] responseAsBytes = json.getBytes(StandardCharsets.UTF_8); final Map headers = new HashMap<>(JSON_CONTENT_TYPE); @@ -213,29 +209,28 @@ protected Response handle(final Request request) throws IOException { } private static Response newError(final RestStatus status, final String code, final String message) throws IOException { - final String response = Strings.toString( - jsonBuilder().startObject() - .field( - "error", - MapBuilder.newMapBuilder() - .put( - "errors", - Collections.singletonList( - MapBuilder.newMapBuilder() - .put("domain", "global") - .put("reason", "required") - .put("message", message) - .put("locationType", "header") - .put("location", code) - .immutableMap() - ) + final String response = jsonBuilder().startObject() + .field( + "error", + MapBuilder.newMapBuilder() + .put( + "errors", + Collections.singletonList( + MapBuilder.newMapBuilder() + .put("domain", "global") + .put("reason", "required") + .put("message", message) + .put("locationType", "header") + .put("location", code) + .immutableMap() ) - .put("code", status.getStatus()) - .put("message", message) - .immutableMap() - ) - .endObject() - ); + ) + .put("code", status.getStatus()) + .put("message", message) + .immutableMap() + ) + .endObject() + .toString(); return new Response(status.getStatus(), JSON_CONTENT_TYPE, response.getBytes(UTF_8)); } diff --git a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java index dd83dfa84dbdb..000e667f39eab 100644 --- a/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java +++ b/plugins/events-correlation-engine/src/javaRestTest/java/org/opensearch/plugin/correlation/CorrelationVectorsEngineIT.java @@ -18,11 +18,11 @@ import org.opensearch.client.ResponseException; import org.opensearch.client.RestClient; import org.opensearch.client.WarningsHandler; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.test.rest.OpenSearchRestTestCase; @@ -94,7 +94,7 @@ public void testQuery() throws IOException { .endObject() .endObject(); - String mapping = Strings.toString(builder); 
+ String mapping = builder.toString(); createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings()); for (int idx = 0; idx < TEST_VECTORS.length; ++idx) { @@ -209,7 +209,7 @@ public void testQueryWithWrongMapping() throws IOException { .endObject() .endObject(); - String mapping = Strings.toString(builder); + String mapping = builder.toString(); Exception ex = assertThrows(ResponseException.class, () -> { createTestIndexWithMappingJson(client(), INDEX_NAME, mapping, getCorrelationDefaultIndexSettings()); }); @@ -225,7 +225,7 @@ public void testQueryWithWrongMapping() throws IOException { private String createTestIndexWithMappingJson(RestClient client, String index, String mapping, Settings settings) throws IOException { Request request = new Request("PUT", "/" + index); - String entity = "{\"settings\": " + Strings.toString(XContentType.JSON, settings); + String entity = "{\"settings\": " + Strings.toString(MediaTypeRegistry.JSON, settings); if (mapping != null) { entity = entity + ",\"mappings\" : " + mapping; } @@ -253,7 +253,7 @@ private void addCorrelationDoc(String index, String docId, List fieldNam } builder.endObject(); - request.setJsonEntity(Strings.toString(builder)); + request.setJsonEntity(builder.toString()); Response response = client().performRequest(request); assertEquals(request.getEndpoint() + ": failed", RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); } diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java index f23a4f25302b1..3489dfdcc4530 100644 --- a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java +++ b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/query/CorrelationQueryBuilderTests.java @@ -12,11 +12,11 @@ import org.junit.Assert; import org.opensearch.Version; import org.opensearch.cluster.ClusterModule; -import org.opensearch.common.Strings; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.common.io.stream.StreamInput; diff --git a/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index d03fcd47fe991..1a8bd1ae1d2a8 100644 --- a/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/internalClusterTest/java/org/opensearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -47,7 +47,6 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.ToXContent; import 
org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -396,92 +395,87 @@ public void testPositionIncrementGap() throws IOException { } public void testSearchAnalyzerSerialization() throws IOException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("field") - .field("type", "annotated_text") - .field("analyzer", "standard") - .field("search_analyzer", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "annotated_text") + .field("analyzer", "standard") + .field("search_analyzer", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); DocumentMapper mapper = createDocumentMapper("_doc", mapping); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default index analyzer - mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("field") - .field("type", "annotated_text") - .field("analyzer", "default") - .field("search_analyzer", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "annotated_text") + .field("analyzer", "default") + .field("search_analyzer", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapper = createDocumentMapper("_doc", mapping); assertEquals(mapping, mapper.mappingSource().toString()); - mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("field") - .field("type", "annotated_text") - .field("analyzer", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "annotated_text") + .field("analyzer", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapper = createDocumentMapper("_doc", mapping); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default search analyzer - mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("field") - .field("type", "annotated_text") - .field("analyzer", "keyword") - .field("search_analyzer", "default") - .endObject() - .endObject() - .endObject() - .endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "annotated_text") + .field("analyzer", "keyword") + .field("search_analyzer", "default") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapper = createDocumentMapper("_doc", mapping); assertEquals(mapping, mapper.mappingSource().toString()); - mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("field") - .field("type", "annotated_text") - .field("analyzer", "keyword") - .endObject() - .endObject() - .endObject() - 
.endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "annotated_text") + .field("analyzer", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapper = createDocumentMapper("_doc", mapping); XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -489,48 +483,46 @@ public void testSearchAnalyzerSerialization() throws IOException { mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); builder.endObject(); - String mappingString = Strings.toString(builder); + String mappingString = builder.toString(); assertTrue(mappingString.contains("analyzer")); assertTrue(mappingString.contains("search_analyzer")); assertTrue(mappingString.contains("search_quote_analyzer")); } public void testSearchQuoteAnalyzerSerialization() throws IOException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("field") - .field("type", "annotated_text") - .field("analyzer", "standard") - .field("search_analyzer", "standard") - .field("search_quote_analyzer", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "annotated_text") + .field("analyzer", "standard") + .field("search_analyzer", "standard") + .field("search_quote_analyzer", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); DocumentMapper mapper = createDocumentMapper("_doc", mapping); assertEquals(mapping, mapper.mappingSource().toString()); // special case: default index/search analyzer - mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("field") - .field("type", "annotated_text") - .field("analyzer", "default") - .field("search_analyzer", "default") - .field("search_quote_analyzer", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("field") + .field("type", "annotated_text") + .field("analyzer", "default") + .field("search_analyzer", "default") + .field("search_quote_analyzer", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); mapper = createDocumentMapper("_doc", mapping); assertEquals(mapping, mapper.mappingSource().toString()); diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java index 834c8a448d3d5..87b1624cbcd64 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java @@ -34,7 +34,6 @@ import java.util.Collection; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; @@ -103,16 +102,15 @@ public void testThatDisablingWorksWhenMerging() throws Exception { DocumentMapper 
docMapper = service.mapperService().documentMapper(); assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); - String disabledMapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("_size") - .field("enabled", false) - .endObject() - .endObject() - .endObject() - ); + String disabledMapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type") + .startObject("_size") + .field("enabled", false) + .endObject() + .endObject() + .endObject() + .toString(); docMapper = service.mapperService() .merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE); diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java index 07cd901449a18..6b09d5477e8d2 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/opensearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -61,7 +61,6 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodes; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.json.JsonXContent; @@ -341,7 +340,7 @@ private static HttpEntity buildUpdateSettingsRequestBody(Map set builder.endObject(); } builder.endObject(); - requestBody = Strings.toString(builder); + requestBody = builder.toString(); } return new StringEntity(requestBody, ContentType.APPLICATION_JSON); } diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java index 5fef24e75d8b7..95bc7f8dc404e 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java @@ -41,11 +41,11 @@ import org.opensearch.cluster.metadata.MetadataIndexStateService; import org.opensearch.common.Booleans; import org.opensearch.common.CheckedFunction; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.test.NotEqualMessageBuilder; @@ -149,7 +149,7 @@ public void testSearch() throws Exception { mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); - createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + createIndex.setJsonEntity(mappingsAndSettings.toString()); createIndex.setOptions(allowTypesRemovalWarnings()); client().performRequest(createIndex); @@ -208,7 +208,7 @@ public void testNewReplicasWork() throws Exception { mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); - createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + createIndex.setJsonEntity(mappingsAndSettings.toString()); 
client().performRequest(createIndex); int numDocs = randomIntBetween(2000, 3000); @@ -257,7 +257,7 @@ public void testClusterState() throws Exception { } mappingsAndSettings.endObject(); Request createTemplate = new Request("PUT", "/_template/template_1"); - createTemplate.setJsonEntity(Strings.toString(mappingsAndSettings)); + createTemplate.setJsonEntity(mappingsAndSettings.toString()); client().performRequest(createTemplate); client().performRequest(new Request("PUT", "/" + index)); } @@ -315,7 +315,7 @@ public void testShrink() throws IOException, NumberFormatException, ParseExcepti mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); - createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + createIndex.setJsonEntity(mappingsAndSettings.toString()); client().performRequest(createIndex); numDocs = randomIntBetween(512, 1024); @@ -384,7 +384,7 @@ public void testShrinkAfterUpgrade() throws IOException, ParseException { mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); - createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + createIndex.setJsonEntity(mappingsAndSettings.toString()); client().performRequest(createIndex); numDocs = randomIntBetween(512, 1024); @@ -854,7 +854,7 @@ public void testSnapshotRestore() throws IOException, ParseException { } templateBuilder.endObject().endObject(); Request createTemplateRequest = new Request("PUT", "/_template/test_template"); - createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder)); + createTemplateRequest.setJsonEntity(templateBuilder.toString()); client().performRequest(createTemplateRequest); @@ -870,7 +870,7 @@ public void testSnapshotRestore() throws IOException, ParseException { } repoConfig.endObject(); Request createRepoRequest = new Request("PUT", "/_snapshot/repo"); - createRepoRequest.setJsonEntity(Strings.toString(repoConfig)); + createRepoRequest.setJsonEntity(repoConfig.toString()); client().performRequest(createRepoRequest); } @@ -897,7 +897,7 @@ public void testHistoryUUIDIsAdded() throws Exception { } mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); - createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + createIndex.setJsonEntity(mappingsAndSettings.toString()); client().performRequest(createIndex); } else { ensureGreenLongWait(index); @@ -940,11 +940,11 @@ public void testSoftDeletes() throws Exception { } mappingsAndSettings.endObject(); Request createIndex = new Request("PUT", "/" + index); - createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + createIndex.setJsonEntity(mappingsAndSettings.toString()); client().performRequest(createIndex); int numDocs = between(10, 100); for (int i = 0; i < numDocs; i++) { - String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject()); + String doc = JsonXContent.contentBuilder().startObject().field("field", "v1").endObject().toString(); Request request = new Request("POST", "/" + index + "/_doc/" + i); request.setJsonEntity(doc); client().performRequest(request); @@ -955,7 +955,7 @@ public void testSoftDeletes() throws Exception { assertTotalHits(liveDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { - String doc = Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v2").endObject()); + String doc = 
JsonXContent.contentBuilder().startObject().field("field", "v2").endObject().toString(); Request request = new Request("POST", "/" + index + "/_doc/" + i); request.setJsonEntity(doc); client().performRequest(request); @@ -989,7 +989,7 @@ public void testClosedIndices() throws Exception { numDocs = between(1, 100); for (int i = 0; i < numDocs; i++) { final Request request = new Request("POST", "/" + index + "/" + type + "/" + i); - request.setJsonEntity(Strings.toString(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject())); + request.setJsonEntity(JsonXContent.contentBuilder().startObject().field("field", "v1").endObject().toString()); assertOK(client().performRequest(request)); if (rarely()) { refresh(); @@ -1083,7 +1083,7 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver restoreCommand.endObject(); Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshotName + "/_restore"); restoreRequest.addParameter("wait_for_completion", "true"); - restoreRequest.setJsonEntity(Strings.toString(restoreCommand)); + restoreRequest.setJsonEntity(restoreCommand.toString()); client().performRequest(restoreRequest); // Make sure search finds all documents @@ -1158,7 +1158,7 @@ private void indexRandomDocuments( for (int i = 0; i < count; i++) { logger.debug("Indexing document [{}]", i); Request createDocument = new Request("POST", "/" + index + "/_doc/" + (specifyId ? i : "")); - createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i))); + createDocument.setJsonEntity(docSupplier.apply(i).toString()); client().performRequest(createDocument); if (rarely()) { refreshAllIndices(); @@ -1175,7 +1175,7 @@ private void indexRandomDocuments( private void indexDocument(String id) throws IOException { final Request indexRequest = new Request("POST", "/" + index + "/" + type + "/" + id); - indexRequest.setJsonEntity(Strings.toString(JsonXContent.contentBuilder().startObject().field("f", "v").endObject())); + indexRequest.setJsonEntity(JsonXContent.contentBuilder().startObject().field("f", "v").endObject().toString()); assertOK(client().performRequest(indexRequest)); } @@ -1190,7 +1190,7 @@ private void saveInfoDocument(String id, String value) throws IOException { // Only create the first version so we know how many documents are created when the index is first created Request request = new Request("PUT", "/info/" + type + "/" + id); request.addParameter("op_type", "create"); - request.setJsonEntity(Strings.toString(infoDoc)); + request.setJsonEntity(infoDoc.toString()); client().performRequest(request); } @@ -1255,7 +1255,7 @@ public void testPeerRecoveryRetentionLeases() throws Exception { settings.endObject(); Request createIndex = new Request("PUT", "/" + index); - createIndex.setJsonEntity(Strings.toString(settings)); + createIndex.setJsonEntity(settings.toString()); client().performRequest(createIndex); } ensureGreen(index); @@ -1485,7 +1485,7 @@ public void testEnableSoftDeletesOnRestore() throws Exception { } repoConfig.endObject(); Request createRepoRequest = new Request("PUT", "/_snapshot/repo"); - createRepoRequest.setJsonEntity(Strings.toString(repoConfig)); + createRepoRequest.setJsonEntity(repoConfig.toString()); client().performRequest(createRepoRequest); // create snapshot Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + snapshot); @@ -1507,7 +1507,7 @@ public void testEnableSoftDeletesOnRestore() throws Exception { restoreCommand.endObject(); Request restoreRequest = new Request("POST", 
"/_snapshot/repo/" + snapshot + "/_restore"); restoreRequest.addParameter("wait_for_completion", "true"); - restoreRequest.setJsonEntity(Strings.toString(restoreCommand)); + restoreRequest.setJsonEntity(restoreCommand.toString()); client().performRequest(restoreRequest); ensureGreen(restoredIndex); int numDocs = countOfIndexedRandomDocuments(); @@ -1539,7 +1539,7 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception { } repoConfig.endObject(); Request createRepoRequest = new Request("PUT", "/_snapshot/repo"); - createRepoRequest.setJsonEntity(Strings.toString(repoConfig)); + createRepoRequest.setJsonEntity(repoConfig.toString()); client().performRequest(createRepoRequest); // create snapshot Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + snapshot); @@ -1560,7 +1560,7 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception { restoreCommand.endObject(); Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshot + "/_restore"); restoreRequest.addParameter("wait_for_completion", "true"); - restoreRequest.setJsonEntity(Strings.toString(restoreCommand)); + restoreRequest.setJsonEntity(restoreCommand.toString()); final ResponseException error = expectThrows(ResponseException.class, () -> client().performRequest(restoreRequest)); assertThat(error.getMessage(), containsString("cannot disable setting [index.soft_deletes.enabled] on restore")); } diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java index 724ac9883efaa..aabc3aee8887f 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/QueryBuilderBWCIT.java @@ -36,7 +36,6 @@ import org.apache.hc.core5.http.io.entity.EntityUtils; import org.opensearch.client.Request; import org.opensearch.client.Response; -import org.opensearch.common.Strings; import org.opensearch.core.common.io.stream.InputStreamStreamInput; import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; @@ -199,7 +198,7 @@ public void testQueryBuilderBWC() throws Exception { mappingsAndSettings.endObject(); Request request = new Request("PUT", "/" + index); request.setOptions(allowTypesRemovalWarnings()); - request.setJsonEntity(Strings.toString(mappingsAndSettings)); + request.setJsonEntity(mappingsAndSettings.toString()); Response rsp = client().performRequest(request); assertEquals(200, rsp.getStatusLine().getStatusCode()); diff --git a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java index f9810e027bb1e..75f7e00f499c6 100644 --- a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java +++ b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/IndexingIT.java @@ -42,7 +42,6 @@ import org.opensearch.client.ResponseException; import org.opensearch.client.RestClient; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; @@ -365,15 +364,14 @@ public void testUpdateSnapshotStatus() throws Exception { // Create the repository before taking the snapshot. 
Request request = new Request("PUT", "/_snapshot/repo"); - request.setJsonEntity(Strings - .toString(JsonXContent.contentBuilder() + request.setJsonEntity(JsonXContent.contentBuilder() .startObject() .field("type", "fs") .startObject("settings") .field("compress", randomBoolean()) .field("location", System.getProperty("tests.path.repo")) .endObject() - .endObject())); + .endObject().toString()); assertOK(client().performRequest(request)); diff --git a/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java index 5f52250c7fb0c..1f2409741a878 100644 --- a/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/opensearch/search/CCSDuelIT.java @@ -53,12 +53,12 @@ import org.opensearch.client.RestHighLevelClient; import org.opensearch.client.indices.CreateIndexRequest; import org.opensearch.client.indices.CreateIndexResponse; -import org.opensearch.common.Strings; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.index.query.InnerHitBuilder; import org.opensearch.index.query.MatchQueryBuilder; import org.opensearch.index.query.QueryBuilders; diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java index 3dd9f371f06fd..6d143d08452e9 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/RecoveryIT.java @@ -40,11 +40,11 @@ import org.opensearch.cluster.metadata.MetadataIndexStateService; import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.opensearch.common.Booleans; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.core.common.Strings; import org.opensearch.index.IndexSettings; import org.opensearch.index.mapper.MapperService; import org.opensearch.core.rest.RestStatus; diff --git a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java index 4a898d816bbf4..b1143ad647327 100644 --- a/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java +++ b/qa/smoke-test-http/src/test/java/org/opensearch/http/SearchRestCancellationIT.java @@ -48,8 +48,8 @@ import org.opensearch.client.Response; import org.opensearch.client.ResponseListener; import org.opensearch.common.SetOnce; -import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.plugins.Plugin; import org.opensearch.plugins.PluginsService; import org.opensearch.script.MockScriptPlugin; @@ -83,7 +83,6 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.greaterThan; import static 
org.hamcrest.Matchers.instanceOf; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.awaitLatch; public class SearchRestCancellationIT extends HttpSmokeTestCase { diff --git a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java index 0dc62b160ff3f..5f0f468898c47 100644 --- a/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java +++ b/qa/translog-policy/src/test/java/org/opensearch/upgrades/TranslogPolicyIT.java @@ -35,7 +35,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.client.Request; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.index.IndexSettings; diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java index 4cbcc5b9bb507..966c040d7877c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java @@ -49,7 +49,6 @@ import org.opensearch.cluster.routing.allocation.NodeAllocationResult; import org.opensearch.cluster.routing.allocation.decider.Decision; import org.opensearch.common.Priority; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.set.Sets; @@ -1275,7 +1274,7 @@ private ClusterAllocationExplanation runExplain(boolean primary, String nodeId, XContentBuilder builder = JsonXContent.contentBuilder(); builder.prettyPrint(); builder.humanReadable(true); - logger.debug("--> explain json output: \n{}", Strings.toString(explanation.toXContent(builder, ToXContent.EMPTY_PARAMS))); + logger.debug("--> explain json output: \n{}", explanation.toXContent(builder, ToXContent.EMPTY_PARAMS).toString()); } return explanation; } diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java index 53afa53de92f3..139b2cb896ded 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java @@ -46,9 +46,9 @@ import org.opensearch.action.update.UpdateResponse; import org.opensearch.client.Requests; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.index.VersionType; import org.opensearch.indices.IndexClosedException; import org.opensearch.plugins.Plugin; diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java index 637cc96bdfc44..14b6ffcd50825 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/PrimaryAllocationIT.java @@ -46,10 +46,10 @@ import org.opensearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.opensearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.gateway.GatewayAllocator; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.engine.Engine; diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java index 581b352e917f0..10e6aa906ecc9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java @@ -43,7 +43,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest.RefreshPolicy; import org.opensearch.cluster.health.ClusterHealthStatus; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; @@ -117,10 +116,10 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { getResult = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").toString())); assertThat("cycle(map) #" + i, (String) getResult.getSourceAsMap().get("name"), equalTo("test")); getResult = client().get(getRequest("test").id("1")).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").toString())); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } @@ -168,10 +167,10 @@ public void testIndexActions() throws Exception { for (int i = 0; i < 5; i++) { getResult = client().get(getRequest("test").id("1")).actionGet(); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("1", "test")))); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("1", "test").toString())); getResult = client().get(getRequest("test").id("2")).actionGet(); String ste1 = getResult.getSourceAsString(); - String ste2 = Strings.toString(source("2", "test2")); + String ste2 = source("2", "test2").toString(); assertThat("cycle #" + i, ste1, equalTo(ste2)); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } @@ -258,15 +257,15 @@ public void testBulk() throws Exception { assertThat("cycle #" + i, getResult.isExists(), equalTo(false)); getResult = client().get(getRequest("test").id("2")).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("2", "test")))); + assertThat("cycle #" + i, getResult.getSourceAsString(), 
equalTo(source("2", "test").toString())); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); getResult = client().get(getRequest("test").id(generatedId3)).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("3", "test")))); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("3", "test").toString())); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); getResult = client().get(getRequest("test").id(generatedId4)).actionGet(); - assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(Strings.toString(source("4", "test")))); + assertThat("cycle #" + i, getResult.getSourceAsString(), equalTo(source("4", "test").toString())); assertThat(getResult.getIndex(), equalTo(getConcreteIndexName())); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index 0b3a689e81b94..c88bf942fa8d0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -46,7 +46,6 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.routing.UnassignedInfo; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.env.NodeEnvironment; @@ -110,16 +109,15 @@ public void testOneNodeRecoverFromGateway() throws Exception { internalCluster().startNode(); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("appAccountIds") - .field("type", "text") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("appAccountIds") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(prepareCreate("test").setMapping(mapping)); client().prepareIndex("test") @@ -204,19 +202,18 @@ private Map assertAndCapturePrimaryTerms(Map pre public void testSingleNodeNoFlush() throws Exception { internalCluster().startNode(); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("field") - .field("type", "text") - .endObject() - .startObject("num") - .field("type", "integer") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("field") + .field("type", "text") + .endObject() + .startObject("num") + .field("type", "integer") + .endObject() + .endObject() + .endObject() + .toString(); // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test. 
int numberOfShards = numberOfShards(); assertAcked( diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java index 997e8e9d5258b..2375c62342533 100644 --- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java @@ -45,12 +45,12 @@ import org.opensearch.action.index.IndexResponse; import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.plugins.Plugin; import org.opensearch.core.rest.RestStatus; @@ -288,17 +288,16 @@ public void testSimpleMultiGet() throws Exception { } public void testGetDocWithMultivaluedFields() throws Exception { - String mapping1 = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("field") - .field("type", "text") - .field("store", true) - .endObject() - .endObject() - .endObject() - ); + String mapping1 = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("field") + .field("type", "text") + .field("store", true) + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(prepareCreate("test").setMapping(mapping1)); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java index f4ccea40e6e3f..a25b6049e92a0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java @@ -33,7 +33,6 @@ package org.opensearch.index.mapper; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.index.query.QueryBuilders; @@ -78,16 +77,15 @@ public void testDynamicTemplateCopyTo() throws Exception { } public void testDynamicObjectCopyTo() throws Exception { - String mapping = Strings.toString( - jsonBuilder().startObject() - .startObject("properties") - .startObject("foo") - .field("type", "text") - .field("copy_to", "root.top.child") - .endObject() - .endObject() - .endObject() - ); + String mapping = jsonBuilder().startObject() + .startObject("properties") + .startObject("foo") + .field("type", "text") + .field("copy_to", "root.top.child") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(client().admin().indices().prepareCreate("test-idx").setMapping(mapping)); client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get(); client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java 
b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java index 4b95c2fc4bce5..d9eeb3f7f8f42 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java @@ -53,8 +53,8 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.CheckedFunction; import org.opensearch.common.CheckedRunnable; -import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.Settings; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java index 30c6a0dc068e5..1525c7bada9ac 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -34,7 +34,6 @@ import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -187,22 +186,22 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { .get(); XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - String responseStrings = Strings.toString(responseBuilder); + String responseStrings = responseBuilder.toString(); XContentBuilder prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings)); - assertThat(responseStrings, equalTo(Strings.toString(prettyJsonBuilder))); + assertThat(responseStrings, equalTo(prettyJsonBuilder.toString())); params.put("pretty", "false"); response = client().admin().indices().prepareGetFieldMappings("index").setFields("field1", "obj.subfield").get(); responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - responseStrings = Strings.toString(responseBuilder); + responseStrings = responseBuilder.toString(); prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings)); - assertThat(responseStrings, not(equalTo(Strings.toString(prettyJsonBuilder)))); + assertThat(responseStrings, not(equalTo(prettyJsonBuilder.toString()))); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 5ce9de7d0eae0..62efdc6a722ee 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -43,7 +43,6 @@ import
org.opensearch.action.admin.indices.refresh.RefreshResponse; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchRequestBuilder; -import org.opensearch.common.Strings; import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; @@ -99,22 +98,20 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc assertThat("Breaker is not set to 0", node.getBreaker().getStats(CircuitBreaker.FIELDDATA).getEstimated(), equalTo(0L)); } - String mapping = Strings // {} - .toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("test-str") - .field("type", "keyword") - .field("doc_values", randomBoolean()) - .endObject() // test-str - .startObject("test-num") - // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double" - .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer"))) - .endObject() // test-num - .endObject() // properties - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("test-str") + .field("type", "keyword") + .field("doc_values", randomBoolean()) + .endObject() // test-str + .startObject("test-num") + // I don't use randomNumericType() here because I don't want "byte", and I want "float" and "double" + .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer"))) + .endObject() // test-num + .endObject() // properties + .endObject() + .toString(); final double topLevelRate; final double lowLevelRate; if (frequently()) { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index 850f08b8136c1..efd43ec5ad82d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -73,7 +73,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Priority; import org.opensearch.common.SetOnce; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.common.concurrent.GatedCloseable; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java index fde30f35d1b6d..3bbd4f83d2b3c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java @@ -44,7 +44,6 @@ import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.index.IndexNotFoundException; @@ -301,16 +300,15 @@ public void testOpenWaitingForActiveShardsFailed() throws Exception { } public void testOpenCloseWithDocs() throws IOException, ExecutionException, 
InterruptedException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("test") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("test") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping)); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java index 9db0ac4590efa..eedd9328826a5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java @@ -48,10 +48,10 @@ import org.opensearch.action.search.SearchScrollAction; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.action.support.WriteRequest; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import org.opensearch.plugins.Plugin; import org.opensearch.plugins.PluginsService; import org.opensearch.script.MockScriptPlugin; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index 617c5745c9bba..271492e47d2ef 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -35,7 +35,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; @@ -1309,16 +1308,15 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { } public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { - String mappingJson = Strings.toString( - jsonBuilder().startObject() - .startObject("properties") - .startObject("date") - .field("type", "date") - .field("format", "strict_date_optional_time||dd-MM-yyyy") - .endObject() - .endObject() - .endObject() - ); + String mappingJson = jsonBuilder().startObject() + .startObject("properties") + .startObject("date") + .field("type", "date") + .field("format", "strict_date_optional_time||dd-MM-yyyy") + .endObject() + .endObject() + .endObject() + .toString(); prepareCreate("idx2").setMapping(mappingJson).get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 43d49dc0bfd60..0854faf6c515c 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -35,7 +35,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -208,7 +207,7 @@ public void testXContentResponse() throws Exception { + "\"score\":0.75," + "\"bg_count\":4" + "}]}}]}}"; - assertThat(Strings.toString(responseBuilder), equalTo(result)); + assertThat(responseBuilder.toString(), equalTo(result)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java index 0b55ea9119d89..b33adea494d17 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java @@ -43,7 +43,6 @@ import org.opensearch.action.index.IndexResponse; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; @@ -81,16 +80,15 @@ protected boolean addMockInternalEngine() { } public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("test") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("test") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .toString(); final double lowLevelRate; final double topLevelRate; if (frequently()) { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java index b0adc00f37fee..7cd389e6274dc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -42,7 +42,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Requests; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; @@ -69,16 +68,15 @@ protected Collection> nodePlugins() { } public void testRandomDirectoryIOExceptions() throws IOException, InterruptedException, ExecutionException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("test") - .field("type", "keyword") - .endObject() - 
.endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("test") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .toString(); final double exceptionRate; final double exceptionOnOpenRate; if (frequently()) { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index f2a22b99a86a3..bf1ca409eee92 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -44,7 +44,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest; -import org.opensearch.common.Strings; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.Settings.Builder; @@ -3231,26 +3230,25 @@ public void testCopyToFields() throws Exception { } public void testACopyFieldWithNestedQuery() throws Exception { - String mapping = Strings.toString( - jsonBuilder().startObject() - .startObject("properties") - .startObject("foo") - .field("type", "nested") - .startObject("properties") - .startObject("text") - .field("type", "text") - .field("copy_to", "foo_text") - .endObject() - .endObject() - .endObject() - .startObject("foo_text") - .field("type", "text") - .field("term_vector", "with_positions_offsets") - .field("store", true) - .endObject() - .endObject() - .endObject() - ); + String mapping = jsonBuilder().startObject() + .startObject("properties") + .startObject("foo") + .field("type", "nested") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("copy_to", "foo_text") + .endObject() + .endObject() + .endObject() + .startObject("foo_text") + .field("type", "text") + .field("term_vector", "with_positions_offsets") + .field("store", true) + .endObject() + .endObject() + .endObject() + .toString(); prepareCreate("test").setMapping(mapping).get(); client().prepareIndex("test") @@ -3361,25 +3359,24 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { } public void testWithNestedQuery() throws Exception { - String mapping = Strings.toString( - jsonBuilder().startObject() - .startObject("properties") - .startObject("text") - .field("type", "text") - .field("index_options", "offsets") - .field("term_vector", "with_positions_offsets") - .endObject() - .startObject("foo") - .field("type", "nested") - .startObject("properties") - .startObject("text") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = jsonBuilder().startObject() + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("index_options", "offsets") + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("foo") + .field("type", "nested") + .startObject("properties") + .startObject("text") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); prepareCreate("test").setMapping(mapping).get(); client().prepareIndex("test") diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index 90d0a59f7b58d..53eb290e1edbf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.Numbers; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.collect.MapBuilder; @@ -201,27 +200,26 @@ static Object docScript(Map vars, String fieldName) { public void testStoredFields() throws Exception { createIndex("test"); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject("properties") - .startObject("field1") - .field("type", "text") - .field("store", true) - .endObject() - .startObject("field2") - .field("type", "text") - .field("store", false) - .endObject() - .startObject("field3") - .field("type", "text") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("field1") + .field("type", "text") + .field("store", true) + .endObject() + .startObject("field2") + .field("type", "text") + .field("store", false) + .endObject() + .startObject("field3") + .field("type", "text") + .field("store", true) + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); @@ -304,19 +302,18 @@ public void testStoredFields() throws Exception { public void testScriptDocAndFields() throws Exception { createIndex("test"); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject("properties") - .startObject("num1") - .field("type", "double") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("num1") + .field("type", "double") + .field("store", true) + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); @@ -406,19 +403,18 @@ public void testScriptDocAndFields() throws Exception { public void testScriptWithUnsignedLong() throws Exception { createIndex("test"); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject("properties") - .startObject("unsigned_num1") - .field("type", "unsigned_long") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("unsigned_num1") + .field("type", "unsigned_long") + .field("store", true) + .endObject() + .endObject() + .endObject() + 
.endObject() + .toString(); client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); @@ -509,18 +505,17 @@ public void testScriptWithUnsignedLong() throws Exception { public void testScriptFieldWithNanos() throws Exception { createIndex("test"); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject("properties") - .startObject("date") - .field("type", "date_nanos") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("date") + .field("type", "date_nanos") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); String date = "2019-01-31T10:00:00.123456789Z"; @@ -706,58 +701,57 @@ public void testPartialFields() throws Exception { public void testStoredFieldsWithoutSource() throws Exception { createIndex("test"); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject("_source") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("byte_field") - .field("type", "byte") - .field("store", true) - .endObject() - .startObject("short_field") - .field("type", "short") - .field("store", true) - .endObject() - .startObject("integer_field") - .field("type", "integer") - .field("store", true) - .endObject() - .startObject("long_field") - .field("type", "long") - .field("store", true) - .endObject() - .startObject("float_field") - .field("type", "float") - .field("store", true) - .endObject() - .startObject("double_field") - .field("type", "double") - .field("store", true) - .endObject() - .startObject("date_field") - .field("type", "date") - .field("store", true) - .endObject() - .startObject("boolean_field") - .field("type", "boolean") - .field("store", true) - .endObject() - .startObject("binary_field") - .field("type", "binary") - .field("store", true) - .endObject() - .startObject("unsigned_long_field") - .field("type", "unsigned_long") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("byte_field") + .field("type", "byte") + .field("store", true) + .endObject() + .startObject("short_field") + .field("type", "short") + .field("store", true) + .endObject() + .startObject("integer_field") + .field("type", "integer") + .field("store", true) + .endObject() + .startObject("long_field") + .field("type", "long") + .field("store", true) + .endObject() + .startObject("float_field") + .field("type", "float") + .field("store", true) + .endObject() + .startObject("double_field") + .field("type", "double") + .field("store", true) + .endObject() + .startObject("date_field") + .field("type", "date") + .field("store", true) + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .field("store", true) + .endObject() + .startObject("binary_field") + .field("type", "binary") + .field("store", true) + .endObject() + .startObject("unsigned_long_field") + .field("type", "unsigned_long") + .field("store", 
true) + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); @@ -944,59 +938,58 @@ public void testSingleValueFieldDatatField() throws ExecutionException, Interrup public void testDocValueFields() throws Exception { createIndex("test"); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) - .startObject("_source") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("text_field") - .field("type", "text") - .field("fielddata", true) - .endObject() - .startObject("keyword_field") - .field("type", "keyword") - .endObject() - .startObject("byte_field") - .field("type", "byte") - .endObject() - .startObject("short_field") - .field("type", "short") - .endObject() - .startObject("integer_field") - .field("type", "integer") - .endObject() - .startObject("long_field") - .field("type", "long") - .endObject() - .startObject("float_field") - .field("type", "float") - .endObject() - .startObject("double_field") - .field("type", "double") - .endObject() - .startObject("date_field") - .field("type", "date") - .endObject() - .startObject("boolean_field") - .field("type", "boolean") - .endObject() - .startObject("binary_field") - .field("type", "binary") - .field("doc_values", true) // off by default on binary fields - .endObject() - .startObject("ip_field") - .field("type", "ip") - .endObject() - .startObject("flat_object_field") - .field("type", "flat_object") - .endObject() - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("_source") + .field("enabled", false) + .endObject() + .startObject("properties") + .startObject("text_field") + .field("type", "text") + .field("fielddata", true) + .endObject() + .startObject("keyword_field") + .field("type", "keyword") + .endObject() + .startObject("byte_field") + .field("type", "byte") + .endObject() + .startObject("short_field") + .field("type", "short") + .endObject() + .startObject("integer_field") + .field("type", "integer") + .endObject() + .startObject("long_field") + .field("type", "long") + .endObject() + .startObject("float_field") + .field("type", "float") + .endObject() + .startObject("double_field") + .field("type", "double") + .endObject() + .startObject("date_field") + .field("type", "date") + .endObject() + .startObject("boolean_field") + .field("type", "boolean") + .endObject() + .startObject("binary_field") + .field("type", "binary") + .field("doc_values", true) // off by default on binary fields + .endObject() + .startObject("ip_field") + .field("type", "ip") + .endObject() + .startObject("flat_object_field") + .field("type", "flat_object") + .endObject() + .endObject() + .endObject() + .endObject() + .toString(); client().admin().indices().preparePutMapping().setSource(mapping, XContentType.JSON).get(); @@ -1067,9 +1060,6 @@ public void testDocValueFields() throws Exception { ) ) ); - String json = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("flat_object_field").field("foo", "bar").endObject().endObject() - ); assertThat(searchResponse.getHits().getAt(0).getFields().get("byte_field").getValue().toString(), equalTo("1")); assertThat(searchResponse.getHits().getAt(0).getFields().get("short_field").getValue().toString(), equalTo("2")); 
assertThat(searchResponse.getHits().getAt(0).getFields().get("integer_field").getValue(), equalTo((Object) 3L)); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java index bb14ed1ea5578..d4467b49d1c18 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java @@ -47,7 +47,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.Priority; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.GeoUtils; @@ -211,17 +210,16 @@ public void testShapeRelations() throws Exception { assertTrue("Disjoint relation is not supported", disjointSupport); assertTrue("within relation is not supported", withinSupport); - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("area") - .field("type", "geo_shape") - .field("tree", "geohash") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("area") + .field("type", "geo_shape") + .field("tree", "geohash") + .endObject() + .endObject() + .endObject() + .toString(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes").setMapping(mapping); mappingRequest.get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java index 89eb6038d8110..cf1a1f82d7200 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.routing.IndexShardRoutingTable; -import org.opensearch.common.Strings; import org.opensearch.common.geo.builders.PointBuilder; import org.opensearch.common.geo.builders.ShapeBuilder; import org.opensearch.common.settings.Settings; @@ -73,32 +72,30 @@ protected Settings nodeSettings(int nodeOrdinal) { */ public void testOrientationPersistence() throws Exception { String idxName = "orientation"; - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("location") - .field("type", "geo_shape") - .field("orientation", "left") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("location") + .field("type", "geo_shape") + .field("orientation", "left") + .endObject() + .endObject() + .endObject() + .toString(); // create index assertAcked(prepareCreate(idxName).setMapping(mapping)); - mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("location") - .field("type", "geo_shape") - .field("orientation", "right") - .endObject() - .endObject() - .endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + 
.startObject("location") + .field("type", "geo_shape") + .field("orientation", "right") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName + "2"); @@ -140,44 +137,43 @@ public void testIgnoreMalformed() throws Exception { ensureGreen(); // test self crossing ccw poly not crossing dateline - String polygonGeoJson = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .field("type", "Polygon") - .startArray("coordinates") - .startArray() - .startArray() - .value(176.0) - .value(15.0) - .endArray() - .startArray() - .value(-177.0) - .value(10.0) - .endArray() - .startArray() - .value(-177.0) - .value(-10.0) - .endArray() - .startArray() - .value(176.0) - .value(-15.0) - .endArray() - .startArray() - .value(-177.0) - .value(15.0) - .endArray() - .startArray() - .value(172.0) - .value(0.0) - .endArray() - .startArray() - .value(176.0) - .value(15.0) - .endArray() - .endArray() - .endArray() - .endObject() - ); + String polygonGeoJson = XContentFactory.jsonBuilder() + .startObject() + .field("type", "Polygon") + .startArray("coordinates") + .startArray() + .startArray() + .value(176.0) + .value(15.0) + .endArray() + .startArray() + .value(-177.0) + .value(10.0) + .endArray() + .startArray() + .value(-177.0) + .value(-10.0) + .endArray() + .startArray() + .value(176.0) + .value(-15.0) + .endArray() + .startArray() + .value(-177.0) + .value(15.0) + .endArray() + .startArray() + .value(172.0) + .value(0.0) + .endArray() + .startArray() + .value(176.0) + .value(15.0) + .endArray() + .endArray() + .endArray() + .endObject() + .toString(); indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson)); SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java index 11f2132bb29de..6332e2b94750d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.routing.IndexShardRoutingTable; -import org.opensearch.common.Strings; import org.opensearch.common.geo.builders.ShapeBuilder; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.ToXContent; @@ -65,34 +64,32 @@ public class LegacyGeoShapeIntegrationIT extends OpenSearchIntegTestCase { */ public void testOrientationPersistence() throws Exception { String idxName = "orientation"; - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("orientation", "left") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("orientation", "left") + .endObject() + .endObject() + .endObject() + .toString(); // create index assertAcked(prepareCreate(idxName).setMapping(mapping)); - mapping = Strings.toString( - 
XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("orientation", "right") - .endObject() - .endObject() - .endObject() - ); + mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("orientation", "right") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName + "2"); @@ -136,44 +133,43 @@ public void testIgnoreMalformed() throws Exception { ensureGreen(); // test self crossing ccw poly not crossing dateline - String polygonGeoJson = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .field("type", "Polygon") - .startArray("coordinates") - .startArray() - .startArray() - .value(176.0) - .value(15.0) - .endArray() - .startArray() - .value(-177.0) - .value(10.0) - .endArray() - .startArray() - .value(-177.0) - .value(-10.0) - .endArray() - .startArray() - .value(176.0) - .value(-15.0) - .endArray() - .startArray() - .value(-177.0) - .value(15.0) - .endArray() - .startArray() - .value(172.0) - .value(0.0) - .endArray() - .startArray() - .value(176.0) - .value(15.0) - .endArray() - .endArray() - .endArray() - .endObject() - ); + String polygonGeoJson = XContentFactory.jsonBuilder() + .startObject() + .field("type", "Polygon") + .startArray("coordinates") + .startArray() + .startArray() + .value(176.0) + .value(15.0) + .endArray() + .startArray() + .value(-177.0) + .value(10.0) + .endArray() + .startArray() + .value(-177.0) + .value(-10.0) + .endArray() + .startArray() + .value(176.0) + .value(-15.0) + .endArray() + .startArray() + .value(-177.0) + .value(15.0) + .endArray() + .startArray() + .value(172.0) + .value(0.0) + .endArray() + .startArray() + .value(176.0) + .value(15.0) + .endArray() + .endArray() + .endArray() + .endObject() + .toString(); indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson)); SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index 04d193aaea71a..2bf3394762621 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.health.ClusterHealthStatus; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -270,7 +269,7 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { String indexName = "foo"; String aliasName = "foo_name"; - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString(); client().admin().indices().prepareCreate(indexName).setMapping(mapping).get(); 
client().admin().indices().prepareAliases().addAlias(indexName, aliasName).get(); @@ -292,7 +291,7 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { } public void testMoreLikeThisIssue2197() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString(); client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); client().prepareIndex("foo") .setId("1") @@ -313,7 +312,7 @@ public void testMoreLikeThisIssue2197() throws Exception { // Issue #2489 public void testMoreLikeWithCustomRouting() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString(); client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); ensureGreen(); @@ -333,7 +332,7 @@ public void testMoreLikeWithCustomRouting() throws Exception { // Issue #3039 public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject().toString(); assertAcked( prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).setMapping( mapping diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java index e1724d496fa91..a531fd0eeb5a9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java @@ -35,7 +35,6 @@ import org.opensearch.action.explain.ExplainResponse; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.json.JsonXContent; @@ -140,14 +139,7 @@ public void testExists() throws Exception { assertSearchResponse(resp); try { assertEquals( - String.format( - Locale.ROOT, - "exists(%s, %d) mapping: %s response: %s", - fieldName, - count, - Strings.toString(mapping), - resp - ), + String.format(Locale.ROOT, "exists(%s, %d) mapping: %s response: %s", fieldName, count, mapping.toString(), resp), count, resp.getHits().getTotalHits().value ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index 46b81ae2e750d..e2491600a9261 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -41,7 +41,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import 
org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -373,17 +372,16 @@ public void testLenientFlagBeingTooLenient() throws Exception { } public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, InterruptedException, IOException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("location") - .field("type", "text") - .field("analyzer", "standard") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("location") + .field("type", "text") + .field("analyzer", "standard") + .endObject() + .endObject() + .endObject() + .toString(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); @@ -420,17 +418,16 @@ public void testSimpleQueryStringOnIndexMetaField() throws Exception { public void testEmptySimpleQueryStringWithAnalysis() throws Exception { // https://github.com/elastic/elasticsearch/issues/18202 - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("body") - .field("type", "text") - .field("analyzer", "stop") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("body") + .field("type", "text") + .field("analyzer", "stop") + .endObject() + .endObject() + .endObject() + .toString(); CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index cc7e620f33216..dcc1136a6d267 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -42,7 +42,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.XContentBuilder; @@ -68,25 +67,24 @@ public class SearchSliceIT extends OpenSearchIntegTestCase { private void setupIndex(int numDocs, int numberOfShards) throws IOException, ExecutionException, InterruptedException { - String mapping = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") - .startObject("invalid_random_kw") - .field("type", "keyword") - .field("doc_values", "false") - .endObject() - .startObject("random_int") - .field("type", "integer") - .field("doc_values", "true") - .endObject() - .startObject("invalid_random_int") - .field("type", "integer") - .field("doc_values", "false") - .endObject() - .endObject() - .endObject() - ); + String mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("invalid_random_kw") + .field("type", "keyword") + .field("doc_values", "false") + .endObject() + .startObject("random_int") + .field("type", "integer") + .field("doc_values", 
"true") + .endObject() + .startObject("invalid_random_int") + .field("type", "integer") + .field("doc_values", "false") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked( client().admin() .indices() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java index 8ff0790e7cb48..2905ef97d521e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java @@ -35,7 +35,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.ShardSearchFailure; -import org.opensearch.common.Strings; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.GeoUtils; import org.opensearch.index.fielddata.ScriptDocValues; @@ -237,24 +236,23 @@ public void testSimpleSorts() throws Exception { } public void testSortMinValueScript() throws IOException { - String mapping = Strings.toString( - jsonBuilder().startObject() - .startObject("properties") - .startObject("lvalue") - .field("type", "long") - .endObject() - .startObject("dvalue") - .field("type", "double") - .endObject() - .startObject("svalue") - .field("type", "keyword") - .endObject() - .startObject("gvalue") - .field("type", "geo_point") - .endObject() - .endObject() - .endObject() - ); + String mapping = jsonBuilder().startObject() + .startObject("properties") + .startObject("lvalue") + .field("type", "long") + .endObject() + .startObject("dvalue") + .field("type", "double") + .endObject() + .startObject("svalue") + .field("type", "keyword") + .endObject() + .startObject("gvalue") + .field("type", "geo_point") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); @@ -351,18 +349,17 @@ public void testDocumentsWithNullValue() throws Exception { // TODO: sort shouldn't fail when sort field is mapped dynamically // We have to specify mapping explicitly because by the time search is performed dynamic mapping might not // be propagated to all nodes yet and sort operation fail when the sort field is not defined - String mapping = Strings.toString( - jsonBuilder().startObject() - .startObject("properties") - .startObject("id") - .field("type", "keyword") - .endObject() - .startObject("svalue") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - ); + String mapping = jsonBuilder().startObject() + .startObject("properties") + .startObject("id") + .field("type", "keyword") + .endObject() + .startObject("svalue") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .toString(); assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java index 3ffd6ce66831e..4aaa5bf5af852 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; -import 
org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; @@ -1305,14 +1304,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE assertSuggestionSize(searchSuggest, 0, 10, "title"); // suggest with collate - String filterString = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("{{field}}", "{{suggestion}}") - .endObject() - .endObject() - ); + String filterString = XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("{{field}}", "{{suggestion}}") + .endObject() + .endObject() + .toString(); PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString); filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title")); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest); @@ -1325,9 +1323,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE NumShards numShards = getNumShards("test"); // collate suggest with bad query - String incorrectFilterString = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("test").field("title", "{{suggestion}}").endObject().endObject() - ); + String incorrectFilterString = XContentFactory.jsonBuilder() + .startObject() + .startObject("test") + .field("title", "{{suggestion}}") + .endObject() + .endObject() + .toString(); PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString); Map> namedSuggestion = new HashMap<>(); namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest); @@ -1339,9 +1341,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE } // suggest with collation - String filterStringAsFilter = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("match_phrase").field("title", "{{suggestion}}").endObject().endObject() - ); + String filterStringAsFilter = XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("title", "{{suggestion}}") + .endObject() + .endObject() + .toString(); PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter); searchSuggest = searchSuggest( @@ -1352,9 +1358,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE assertSuggestionSize(searchSuggest, 0, 2, "title"); // collate suggest with bad query - String filterStr = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("pprefix").field("title", "{{suggestion}}").endObject().endObject() - ); + String filterStr = XContentFactory.jsonBuilder() + .startObject() + .startObject("pprefix") + .field("title", "{{suggestion}}") + .endObject() + .endObject() + .toString(); suggest.collateQuery(filterStr); try { @@ -1365,14 +1375,13 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE } // collate script failure due to no additional params - String collateWithParams = Strings.toString( - XContentFactory.jsonBuilder() - .startObject() - .startObject("{{query_type}}") - .field("{{query_field}}", "{{suggestion}}") - .endObject() - .endObject() - ); + String collateWithParams = XContentFactory.jsonBuilder() + .startObject() + .startObject("{{query_type}}") + .field("{{query_field}}", "{{suggestion}}") + .endObject() + 
.endObject() + .toString(); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java index 714abe86df71e..019a1e1417510 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java @@ -39,7 +39,7 @@ import org.opensearch.cluster.health.ClusterHealthStatus; import org.opensearch.cluster.health.ClusterIndexHealth; import org.opensearch.cluster.health.ClusterStateHealth; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.ClusterSettings; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java index 3dcb30a091a94..566ae06649168 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -36,7 +36,6 @@ import org.opensearch.action.support.nodes.BaseNodesResponse; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.node.DiscoveryNodeRole; -import org.opensearch.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; @@ -165,7 +164,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return Strings.toString(builder); + return builder.toString(); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java index 22044f0c69c48..a4a69cd301b41 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java @@ -38,7 +38,6 @@ import org.opensearch.action.support.nodes.BaseNodesResponse; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.ToXContentFragment; @@ -99,7 +98,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return Strings.toString(builder); + return builder.toString(); } catch (final IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java index 0037d1ea27873..539bae311ac98 100644 --- 
a/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/stats/NodesStatsResponse.java @@ -35,7 +35,6 @@ import org.opensearch.action.FailedNodeException; import org.opensearch.action.support.nodes.BaseNodesResponse; import org.opensearch.cluster.ClusterName; -import org.opensearch.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.ToXContentFragment; @@ -92,7 +91,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return Strings.toString(builder); + return builder.toString(); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java index 4741504ddd035..f2ebde642d2be 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java @@ -35,7 +35,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.action.TaskOperationFailure; import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.ConstructingObjectParser; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java index b32e59fc77794..36bec88109cf1 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/get/GetTaskResponse.java @@ -33,7 +33,7 @@ package org.opensearch.action.admin.cluster.node.tasks.get; import org.opensearch.action.ActionResponse; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java index 85b165df68cfa..1e2e432882623 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java @@ -39,8 +39,8 @@ import org.opensearch.cluster.node.DiscoveryNodeRole; import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; import org.opensearch.common.TriFunction; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.XContentType; diff --git 
a/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java index 59b1f99429cfe..e0c8e6e8e269e 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/usage/NodesUsageResponse.java @@ -35,7 +35,6 @@ import org.opensearch.action.FailedNodeException; import org.opensearch.action.support.nodes.BaseNodesResponse; import org.opensearch.cluster.ClusterName; -import org.opensearch.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.ToXContentFragment; @@ -93,7 +92,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return Strings.toString(builder); + return builder.toString(); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java index eb1935158c231..fd29c324d51a1 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/restore/RestoreRemoteStoreRequest.java @@ -205,7 +205,7 @@ public int hashCode() { @Override public String toString() { - return org.opensearch.common.Strings.toString(XContentType.JSON, this); + return Strings.toString(XContentType.JSON, this); } } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java index f6613c1d2ac50..4f0832816fd8a 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/remotestore/stats/RemoteStoreStatsResponse.java @@ -10,7 +10,7 @@ import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.action.support.broadcast.BroadcastResponse; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java index 8b9142a39e9c9..9a97b67e1c2b7 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java @@ -35,7 +35,7 @@ import org.opensearch.action.ActionResponse; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import 
org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java index 8d29baa82562a..9a8206a4cfdba 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java @@ -34,7 +34,7 @@ import org.opensearch.action.ActionResponse; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.ConstructingObjectParser; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java index fedcfa1f5d9ff..5729386259df1 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequest.java @@ -36,7 +36,7 @@ import org.opensearch.action.IndicesRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index ce41066868202..9736d99b9f886 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -38,7 +38,7 @@ import org.opensearch.action.IndicesRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; @@ -74,7 +74,7 @@ *
<li>must not contain hash sign ('#')</li> * <li>must not start with underscore ('_')</li> * <li>must be lowercase</li> - * <li>must not contain invalid file name characters {@link org.opensearch.core.common.Strings#INVALID_FILENAME_CHARS}</li> + * <li>must not contain invalid file name characters {@link Strings#INVALID_FILENAME_CHARS}</li> * </ul>
  • * * * @opensearch.internal @@ -390,7 +390,7 @@ public CreateSnapshotRequest settings(Map source) { try { XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.map(source); - settings(Strings.toString(builder), builder.contentType()); + settings(builder.toString(), builder.contentType()); } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e); } @@ -447,7 +447,7 @@ public CreateSnapshotRequest source(Map source) { String name = entry.getKey(); if (name.equals("indices")) { if (entry.getValue() instanceof String) { - indices(org.opensearch.core.common.Strings.splitStringByCommaToArray((String) entry.getValue())); + indices(Strings.splitStringByCommaToArray((String) entry.getValue())); } else if (entry.getValue() instanceof List) { indices((List) entry.getValue()); } else { diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java index 3c96c67f78fd4..b8b4d972c95f7 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java @@ -33,7 +33,7 @@ package org.opensearch.action.admin.cluster.snapshots.get; import org.opensearch.action.ActionResponse; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 7a142e70305ae..0f9aa65afe3c2 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -749,6 +749,6 @@ public int hashCode() { @Override public String toString() { - return org.opensearch.common.Strings.toString(XContentType.JSON, this); + return Strings.toString(XContentType.JSON, this); } } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java index 8b718aeaf70c7..c7690ea0d7817 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStats.java @@ -32,7 +32,7 @@ package org.opensearch.action.admin.cluster.snapshots.status; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index a3b401980b109..d1e25c1f1bdc4 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ 
b/server/src/main/java/org/opensearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -35,7 +35,7 @@ import org.opensearch.cluster.SnapshotsInProgress; import org.opensearch.cluster.SnapshotsInProgress.State; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java index 9cdd5bf244ecb..84b093f9bb238 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/AnalysisStats.java @@ -35,7 +35,7 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java index 1dda39a17babc..8926f41777809 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/ClusterStatsResponse.java @@ -37,7 +37,6 @@ import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.health.ClusterHealthStatus; -import org.opensearch.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.ToXContentFragment; @@ -168,7 +167,7 @@ public String toString() { builder.startObject(); toXContent(builder, EMPTY_PARAMS); builder.endObject(); - return Strings.toString(builder); + return builder.toString(); } catch (IOException e) { return "{ \"error\" : \"" + e.getMessage() + "\"}"; } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java index 90c78f30ea78d..66d1fc6a52295 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/MappingStats.java @@ -35,7 +35,7 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java index 2f304668df01c..94dbb5ff46a02 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/alias/Alias.java @@ -35,7 +35,7 @@ import org.opensearch.OpenSearchGenerationException; import org.opensearch.common.Nullable; import org.opensearch.core.ParseField; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; @@ -44,9 +44,7 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilder; import java.io.IOException; @@ -128,9 +126,9 @@ public Alias filter(Map filter) { return this; } try { - XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(MediaTypeRegistry.JSON); builder.map(filter); - this.filter = Strings.toString(builder); + this.filter = builder.toString(); return this; } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + filter + "]", e); @@ -146,10 +144,10 @@ public Alias filter(QueryBuilder filterBuilder) { return this; } try { - XContentBuilder builder = XContentFactory.jsonBuilder(); + XContentBuilder builder = MediaTypeRegistry.JSON.contentBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - this.filter = Strings.toString(builder); + this.filter = builder.toString(); return this; } catch (IOException e) { throw new OpenSearchGenerationException("Failed to build json for alias request", e); @@ -279,7 +277,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (filter != null) { try (InputStream stream = new BytesArray(filter).streamInput()) { - builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON); + builder.rawField(FILTER.getPreferredName(), stream, MediaTypeRegistry.JSON); } } @@ -306,7 +304,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public String toString() { - return Strings.toString(XContentType.JSON, this); + return Strings.toString(MediaTypeRegistry.JSON, this); } @Override diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java index 6ed9648502ee3..cd99a1067a8a4 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -431,7 +431,7 @@ public AliasActions filter(Map filter) { try { XContentBuilder builder = MediaTypeRegistry.contentBuilder(XContentType.JSON); builder.map(filter); - this.filter = org.opensearch.common.Strings.toString(builder); + this.filter = builder.toString(); return this; } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + filter + "]", e); @@ -447,7 +447,7 @@ public AliasActions filter(QueryBuilder filter) { XContentBuilder builder = XContentFactory.jsonBuilder(); filter.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.close(); - this.filter = org.opensearch.common.Strings.toString(builder); + this.filter = 
builder.toString(); return this; } catch (IOException e) { throw new OpenSearchGenerationException("Failed to build json for alias request", e); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java index 37e05151c8179..00144eedc438f 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/analyze/AnalyzeAction.java @@ -36,7 +36,7 @@ import org.opensearch.action.ActionResponse; import org.opensearch.action.ActionType; import org.opensearch.action.support.single.shard.SingleShardRequest; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java index a3c599a54a10c..c069cd17b8c51 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/close/CloseIndexResponse.java @@ -35,7 +35,7 @@ import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.action.support.master.ShardsAcknowledgedResponse; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index 53840b7697e45..001b466fc47e5 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -42,7 +42,6 @@ import org.opensearch.action.support.ActiveShardCount; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.master.AcknowledgedRequest; -import org.opensearch.common.Strings; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamInput; @@ -230,7 +229,7 @@ private CreateIndexRequest settings(String source, MediaType mediaType) { * Allows to set the settings using a json builder. 
*/ public CreateIndexRequest settings(XContentBuilder builder) { - settings(Strings.toString(builder), builder.contentType()); + settings(builder.toString(), builder.contentType()); return this; } @@ -346,7 +345,7 @@ private CreateIndexRequest mapping(String type, Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - return mapping(Strings.toString(builder)); + return mapping(builder.toString()); } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java index df05c3dd665d2..b0fc6856eb43c 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/get/GetIndexResponse.java @@ -36,7 +36,7 @@ import org.opensearch.action.ActionResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java index c4c9094e276d6..6b3fff19d532f 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -35,7 +35,7 @@ import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.XContentType; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java index f90bdaca510ae..42dacfdb3ca2d 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/readonly/AddIndexBlockResponse.java @@ -35,7 +35,7 @@ import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.action.support.master.ShardsAcknowledgedResponse; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.core.common.io.stream.Writeable; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java index 7664a73c27fc8..a7015a9d580df 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/recovery/RecoveryResponse.java @@ -34,7 
 import org.opensearch.core.action.support.DefaultShardOperationFailedException;
 import org.opensearch.action.support.broadcast.BroadcastResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java
index 63899668badca..99ff501c1eed8 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/replication/SegmentReplicationStatsResponse.java
@@ -10,7 +10,7 @@
 import org.opensearch.core.action.support.DefaultShardOperationFailedException;
 import org.opensearch.action.support.broadcast.BroadcastResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java
index f58c842be374b..8503c9b882c93 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverInfo.java
@@ -34,7 +34,7 @@
 import org.opensearch.cluster.AbstractDiffable;
 import org.opensearch.cluster.Diff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java
index 05b06fc2b62c7..a1178fbe21f3c 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/settings/get/GetSettingsResponse.java
@@ -33,7 +33,6 @@
 package org.opensearch.action.admin.indices.settings.get;
 
 import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.settings.Settings;
@@ -193,7 +192,7 @@ public String toString() {
             ByteArrayOutputStream baos = new ByteArrayOutputStream();
             XContentBuilder builder = new XContentBuilder(JsonXContent.jsonXContent, baos);
             toXContent(builder, ToXContent.EMPTY_PARAMS, false);
-            return Strings.toString(builder);
+            return builder.toString();
         } catch (IOException e) {
             throw new IllegalStateException(e); // should not be possible here
         }
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java
index a7b7e005bce90..3c12f3eb8b728 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/UpdateSettingsRequest.java
@@ -36,7 +36,7 @@
 import org.opensearch.action.IndicesRequest;
 import org.opensearch.action.support.IndicesOptions;
 import org.opensearch.action.support.master.AcknowledgedRequest;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java
index 4014bad06ff9a..b262835dc2f2a 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/stats/IndicesStatsResponse.java
@@ -36,7 +36,7 @@
 import org.opensearch.core.action.support.DefaultShardOperationFailedException;
 import org.opensearch.action.support.broadcast.BroadcastResponse;
 import org.opensearch.cluster.routing.ShardRouting;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
index 099f7c34ff818..011f10bfaf6d6 100644
--- a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
+++ b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java
@@ -41,7 +41,6 @@
 import org.opensearch.action.support.IndicesOptions;
 import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.common.io.stream.StreamInput;
@@ -298,7 +297,7 @@ public PutIndexTemplateRequest mapping(Map source) {
         try {
             XContentBuilder builder = XContentFactory.jsonBuilder();
             builder.map(source);
-            mappings = Strings.toString(builder);
+            mappings = builder.toString();
             return this;
         } catch (IOException e) {
             throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e);
diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java b/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java
index 2a85b7abb741a..bbf887b71cbb2 100644
--- a/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java
+++ b/server/src/main/java/org/opensearch/action/bulk/BulkItemRequest.java
@@ -36,7 +36,7 @@
 import org.apache.lucene.util.RamUsageEstimator;
 import org.opensearch.action.DocWriteRequest;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java
index 26cd318e7a280..08a8e7b6d7865 100644
--- a/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java
+++ b/server/src/main/java/org/opensearch/action/bulk/BulkItemResponse.java
@@ -41,7 +41,7 @@
 import org.opensearch.action.index.IndexResponse;
 import org.opensearch.action.update.UpdateResponse;
 import org.opensearch.common.CheckedConsumer;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java
index 8053e1f8521cf..c4298e75f8302 100644
--- a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java
+++ b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilities.java
@@ -32,7 +32,7 @@
 package org.opensearch.action.fieldcaps;
 
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java
index 550ef2d412ca4..5b14a0d5a40b4 100644
--- a/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java
+++ b/server/src/main/java/org/opensearch/action/fieldcaps/FieldCapabilitiesResponse.java
@@ -34,7 +34,7 @@
 import org.opensearch.action.ActionResponse;
 import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.common.collect.Tuple;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/action/get/GetResponse.java b/server/src/main/java/org/opensearch/action/get/GetResponse.java
index b713dc8a507d1..abb1ddfe041c9 100644
--- a/server/src/main/java/org/opensearch/action/get/GetResponse.java
+++ b/server/src/main/java/org/opensearch/action/get/GetResponse.java
@@ -35,7 +35,7 @@
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.action.ActionResponse;
 import org.opensearch.core.common.ParsingException;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.common.document.DocumentField;
 import org.opensearch.core.common.io.stream.StreamInput;
diff --git a/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java b/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java
index e8d0c1b9d320f..3a28b123b6539 100644
--- a/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java
+++ b/server/src/main/java/org/opensearch/action/get/MultiGetRequest.java
@@ -43,7 +43,7 @@
 import org.opensearch.action.support.IndicesOptions;
 import org.opensearch.common.Nullable;
 import org.opensearch.core.common.ParsingException;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/index/IndexResponse.java b/server/src/main/java/org/opensearch/action/index/IndexResponse.java
index 12d788323b497..c7c2138a63b4e 100644
--- a/server/src/main/java/org/opensearch/action/index/IndexResponse.java
+++ b/server/src/main/java/org/opensearch/action/index/IndexResponse.java
@@ -33,7 +33,7 @@
 package org.opensearch.action.index;
 
 import org.opensearch.action.DocWriteResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.core.xcontent.XContentParser;
diff --git a/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java
index a26fa413b2f5b..a22f499c4add4 100644
--- a/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java
+++ b/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java
@@ -33,7 +33,7 @@
 package org.opensearch.action.ingest;
 
 import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java
index 4211839ce6569..cf8d9cec779c8 100644
--- a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java
+++ b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java
@@ -9,7 +9,7 @@
 package org.opensearch.action.search;
 
 import org.opensearch.action.ActionResponse;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java b/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java
index c576d87d85b0f..7facd62f90bad 100644
--- a/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java
+++ b/server/src/main/java/org/opensearch/action/search/MultiSearchResponse.java
@@ -36,7 +36,7 @@
 import org.opensearch.OpenSearchException;
 import org.opensearch.action.ActionResponse;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/search/SearchResponse.java b/server/src/main/java/org/opensearch/action/search/SearchResponse.java
index c7ab8f0858e7b..d4ebc0a2363af 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchResponse.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchResponse.java
@@ -35,7 +35,7 @@
 import org.apache.lucene.search.TotalHits;
 import org.opensearch.action.ActionResponse;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java b/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java
index 5aa37ec65ff0d..da00183515ae5 100644
--- a/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java
+++ b/server/src/main/java/org/opensearch/action/support/replication/ReplicationTask.java
@@ -32,7 +32,7 @@
 package org.opensearch.action.support.replication;
 
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java b/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java
index c399bd59dbbe1..be471ab6a68ec 100644
--- a/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java
+++ b/server/src/main/java/org/opensearch/cluster/RepositoryCleanupInProgress.java
@@ -33,7 +33,7 @@
 import org.opensearch.LegacyESVersion;
 import org.opensearch.Version;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java
index 855f7755419d8..5f9c93ff254c9 100644
--- a/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java
+++ b/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java
@@ -35,7 +35,7 @@
 import org.opensearch.Version;
 import org.opensearch.cluster.ClusterState.Custom;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java
index 75b73be8fa12e..1520a293d2741 100644
--- a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java
+++ b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributeValueHealth.java
@@ -14,7 +14,7 @@
 import org.opensearch.cluster.routing.RoutingNode;
 import org.opensearch.cluster.routing.ShardRoutingState;
 import org.opensearch.cluster.routing.WeightedRouting;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java
index 340fcfe0d0d31..08832cb1e8807 100644
--- a/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java
+++ b/server/src/main/java/org/opensearch/cluster/awarenesshealth/ClusterAwarenessAttributesHealth.java
@@ -11,7 +11,7 @@
 import org.opensearch.OpenSearchParseException;
 import org.opensearch.cluster.ClusterState;
 import org.opensearch.cluster.node.DiscoveryNode;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
index b57da128ce852..4429136525534 100644
--- a/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
+++ b/server/src/main/java/org/opensearch/cluster/coordination/Coordinator.java
@@ -61,8 +61,8 @@
 import org.opensearch.common.Nullable;
 import org.opensearch.common.Priority;
 import org.opensearch.common.SetOnce;
-import org.opensearch.common.Strings;
 import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Setting;
diff --git a/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java b/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java
index 8af783bbdc52e..5a9c82a3849e9 100644
--- a/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/decommission/DecommissionAttributeMetadata.java
@@ -14,7 +14,7 @@
 import org.opensearch.cluster.NamedDiff;
 import org.opensearch.cluster.metadata.Metadata;
 import org.opensearch.cluster.metadata.Metadata.Custom;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java b/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java
index 00a83c85c17be..32b54ac947ebd 100644
--- a/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java
+++ b/server/src/main/java/org/opensearch/cluster/health/ClusterShardHealth.java
@@ -37,7 +37,7 @@
 import org.opensearch.cluster.routing.ShardRouting;
 import org.opensearch.cluster.routing.UnassignedInfo;
 import org.opensearch.cluster.routing.UnassignedInfo.AllocationStatus;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java
index 27ecca0358bd8..5e4de1be71214 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasMetadata.java
@@ -260,7 +260,7 @@ public static Diff readDiffFrom(StreamInput in) throws IOExceptio
 
     @Override
     public String toString() {
-        return org.opensearch.common.Strings.toString(XContentType.JSON, this, true, true);
+        return Strings.toString(XContentType.JSON, this, true, true);
     }
 
     @Override
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java
index 52096422248a5..7c5e0f664df4e 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplate.java
@@ -35,7 +35,7 @@
 import org.opensearch.cluster.AbstractDiffable;
 import org.opensearch.cluster.Diff;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java
index ce806b2aa1f12..2e700389e4fc9 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComponentTemplateMetadata.java
@@ -36,7 +36,7 @@
 import org.opensearch.cluster.Diff;
 import org.opensearch.cluster.DiffableUtils;
 import org.opensearch.cluster.NamedDiff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java
index 15e5cb5873719..b9b7c132ba2cf 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplate.java
@@ -36,7 +36,7 @@
 import org.opensearch.cluster.Diff;
 import org.opensearch.cluster.metadata.DataStream.TimestampField;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.common.io.stream.Writeable;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java
index d34416c70dc16..b72c0fdf81e41 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/ComposableIndexTemplateMetadata.java
@@ -36,7 +36,7 @@
 import org.opensearch.cluster.Diff;
 import org.opensearch.cluster.DiffableUtils;
 import org.opensearch.cluster.NamedDiff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.core.ParseField;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java
index 89fe6e9be2320..c7854355ea5cc 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/DataStreamMetadata.java
@@ -36,7 +36,7 @@
 import org.opensearch.cluster.Diff;
 import org.opensearch.cluster.DiffableUtils;
 import org.opensearch.cluster.NamedDiff;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.xcontent.XContentType;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java
index e430ba5465499..4a89298aa0429 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java
@@ -35,7 +35,6 @@
 import org.opensearch.cluster.AbstractDiffable;
 import org.opensearch.cluster.Diff;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
 import org.opensearch.common.collect.MapBuilder;
 import org.opensearch.common.compress.CompressedXContent;
 import org.opensearch.core.common.io.stream.StreamInput;
@@ -263,7 +262,7 @@ public String toString() {
             builder.startObject();
             Builder.toXContentWithTypes(this, builder, ToXContent.EMPTY_PARAMS);
             builder.endObject();
-            return Strings.toString(builder);
+            return builder.toString();
         } catch (IOException e) {
             throw new UncheckedIOException(e);
         }
@@ -480,7 +479,7 @@ public static IndexTemplateMetadata fromXContent(XContentParser parser, String t
                         Map mappingSource = MapBuilder.newMapBuilder()
                             .put(mappingType, parser.mapOrdered())
                             .map();
-                        builder.putMapping(mappingType, Strings.toString(XContentFactory.jsonBuilder().map(mappingSource)));
+                        builder.putMapping(mappingType, XContentFactory.jsonBuilder().map(mappingSource).toString());
                     }
                 }
             } else if ("aliases".equals(currentFieldName)) {
@@ -496,7 +495,7 @@ public static IndexTemplateMetadata fromXContent(XContentParser parser, String t
                     Map mapping = parser.mapOrdered();
                     if (mapping.size() == 1) {
                         String mappingType = mapping.keySet().iterator().next();
-                        String mappingSource = Strings.toString(XContentFactory.jsonBuilder().map(mapping));
+                        String mappingSource = XContentFactory.jsonBuilder().map(mapping).toString();
                         if (mappingSource == null) {
                             // crap, no mapping source, warn?
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java
index 2c4e6bdec95b0..0779f61d97bf0 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java
@@ -302,9 +302,11 @@ ClusterState addComponentTemplate(
         if (stringMappings != null) {
             Map parsedMappings = MapperService.parseMapping(xContentRegistry, stringMappings);
             if (parsedMappings.size() > 0) {
-                stringMappings = org.opensearch.common.Strings.toString(
-                    XContentFactory.jsonBuilder().startObject().field(MapperService.SINGLE_MAPPING_NAME, parsedMappings).endObject()
-                );
+                stringMappings = XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field(MapperService.SINGLE_MAPPING_NAME, parsedMappings)
+                    .endObject()
+                    .toString();
             }
         }
 
@@ -591,9 +593,11 @@ public ClusterState addIndexTemplateV2(
         if (stringMappings != null) {
             Map parsedMappings = MapperService.parseMapping(xContentRegistry, stringMappings);
             if (parsedMappings.size() > 0) {
-                stringMappings = org.opensearch.common.Strings.toString(
-                    XContentFactory.jsonBuilder().startObject().field(MapperService.SINGLE_MAPPING_NAME, parsedMappings).endObject()
-                );
+                stringMappings = XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field(MapperService.SINGLE_MAPPING_NAME, parsedMappings)
+                    .endObject()
+                    .toString();
             }
         }
         final Template finalTemplate = new Template(
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java
index e654745e8ccdb..9123e4a8d2de3 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java
@@ -38,7 +38,7 @@
 import org.opensearch.cluster.NamedDiff;
 import org.opensearch.cluster.metadata.Metadata.Custom;
 import org.opensearch.common.Nullable;
-import org.opensearch.common.Strings;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
 import org.opensearch.common.settings.Settings;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/Template.java b/server/src/main/java/org/opensearch/cluster/metadata/Template.java
index 45d11dd9250e8..8e367c71ed166 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/Template.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/Template.java
@@ -34,19 +34,18 @@
 import org.opensearch.cluster.AbstractDiffable;
 import org.opensearch.common.Nullable;
-import org.opensearch.core.ParseField;
-import org.opensearch.common.Strings;
 import org.opensearch.common.compress.CompressedXContent;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.xcontent.XContentHelper;
+import org.opensearch.core.ParseField;
+import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
-import org.opensearch.common.settings.Settings;
 import org.opensearch.core.xcontent.ConstructingObjectParser;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.ToXContentObject;
 import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
-import org.opensearch.common.xcontent.XContentHelper;
 import org.opensearch.core.xcontent.XContentParser;
-import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.index.mapper.MapperService;
 
 import java.io.IOException;
@@ -77,7 +76,7 @@ public class Template extends AbstractDiffable