Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adding reprovision integration tests #834

Merged
merged 21 commits into from
Aug 16, 2024
Merged
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,12 @@
*/
package org.opensearch.flowframework.model;

import org.apache.logging.log4j.util.Strings;
import org.opensearch.Version;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.yaml.YamlXContent;
import org.opensearch.commons.authuser.User;
import org.opensearch.core.common.Strings;
import org.opensearch.core.rest.RestStatus;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.ToXContentObject;
Expand Down Expand Up @@ -372,10 +372,10 @@ public static Template updateExistingTemplate(Template existingTemplate, Templat
if (templateWithNewFields.name() != null) {
builder.name(templateWithNewFields.name());
}
if (!Strings.isBlank(templateWithNewFields.description())) {
if (Strings.hasText(templateWithNewFields.description())) {
builder.description(templateWithNewFields.description());
}
if (!Strings.isBlank(templateWithNewFields.useCase())) {
if (Strings.hasText(templateWithNewFields.useCase())) {
builder.useCase(templateWithNewFields.useCase());
}
if (templateWithNewFields.templateVersion() != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -415,6 +415,25 @@ protected Response createWorkflowValidation(RestClient client, Template template
return TestHelpers.makeRequest(client, "POST", WORKFLOW_URI, Collections.emptyMap(), template.toJson(), null);
}

/**
* Helper method to invoke the Reprovision Workflow API (PUT {workflow URI}/{id}?reprovision=true).
* @param client the rest client
* @param workflowId the document id of the workflow to reprovision
* @param template the updated template to reprovision the workflow with
* @return a rest response
* @throws Exception if the request fails
*/
protected Response reprovisionWorkflow(RestClient client, String workflowId, Template template) throws Exception {
return TestHelpers.makeRequest(
client,
"PUT",
String.format(Locale.ROOT, "%s/%s?reprovision=true", WORKFLOW_URI, workflowId),
Collections.emptyMap(),
template.toJson(),
null
);
}

/**
* Helper method to invoke the Update Workflow API
* @param client the rest client
Expand Down
4 changes: 2 additions & 2 deletions src/test/java/org/opensearch/flowframework/TestHelpers.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import org.apache.hc.core5.http.Header;
import org.apache.hc.core5.http.HttpEntity;
import org.apache.hc.core5.http.io.entity.StringEntity;
import org.apache.logging.log4j.util.Strings;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
Expand All @@ -24,6 +23,7 @@
import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.commons.authuser.User;
import org.opensearch.core.common.Strings;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.rest.RestStatus;
import org.opensearch.core.xcontent.ToXContent;
Expand Down Expand Up @@ -74,7 +74,7 @@ public static Response makeRequest(
String jsonEntity,
List<Header> headers
) throws IOException {
HttpEntity httpEntity = Strings.isBlank(jsonEntity) ? null : new StringEntity(jsonEntity, APPLICATION_JSON);
HttpEntity httpEntity = !Strings.hasText(jsonEntity) ? null : new StringEntity(jsonEntity, APPLICATION_JSON);
return makeRequest(client, method, endpoint, params, httpEntity, headers);
}

Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
{
"name": "semantic search with local pretrained model",
"description": "Setting up semantic search, with a local pretrained embedding model",
"use_case": "SEMANTIC_SEARCH",
"version": {
joshpalis marked this conversation as resolved.
Show resolved Hide resolved
"template": "1.0.0",
"compatibility": [
"2.12.0",
"3.0.0"
]
},
"workflows": {
"provision": {
"nodes": [
{
"id": "create_openai_connector",
"type": "create_connector",
"user_inputs": {
"name": "OpenAI Chat Connector",
"description": "The connector to public OpenAI model service for text embedding model",
"version": "1",
"protocol": "http",
"parameters": {
"endpoint": "api.openai.com",
"model": "gpt-3.5-turbo",
"response_filter": "$.choices[0].message.content"
},
"credential": {
"openAI_key": "12345"
},
"actions": [
{
"action_type": "predict",
"method": "POST",
"url": "https://${parameters.endpoint}/v1/chat/completions"
}
]
}
},
{
"id": "register_openai_model",
"type": "register_remote_model",
"previous_node_inputs": {
"create_openai_connector": "connector_id"
},
"user_inputs": {
"name": "openAI-gpt-3.5-turbo",
"deploy": true
}
},
{
"id": "create_index",
"type": "create_index",
"user_inputs": {
"index_name": "my-nlp-index",
"configurations": {
"settings": {
"index.knn": true,
"index.number_of_shards": "2"
},
"mappings": {
"properties": {
"passage_embedding": {
"type": "knn_vector",
"dimension": "768",
"method": {
"engine": "lucene",
"space_type": "l2",
"name": "hnsw",
"parameters": {}
}
},
"passage_text": {
"type": "text"
}
}
}
}
}
}
]
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
{
"name": "semantic search with local pretrained model",
"description": "Setting up semantic search, with a local pretrained embedding model",
"use_case": "SEMANTIC_SEARCH",
"version": {
"template": "1.0.0",
"compatibility": [
"2.12.0",
"3.0.0"
]
},
"workflows": {
"provision": {
"nodes": [
{
"id": "create_openai_connector",
"type": "create_connector",
"user_inputs": {
"name": "OpenAI Chat Connector",
"description": "The connector to public OpenAI model service for text embedding model",
"version": "1",
"protocol": "http",
"parameters": {
"endpoint": "api.openai.com",
"model": "gpt-3.5-turbo",
"response_filter": "$.choices[0].message.content"
},
"credential": {
"openAI_key": "12345"
},
"actions": [
{
"action_type": "predict",
"method": "POST",
"url": "https://${parameters.endpoint}/v1/chat/completions"
}
]
}
},
{
"id": "register_openai_model",
"type": "register_remote_model",
"previous_node_inputs": {
"create_openai_connector": "connector_id"
},
"user_inputs": {
"name": "openAI-gpt-3.5-turbo",
"deploy": true
}
},
{
"id": "create_ingest_pipeline",
"type": "create_ingest_pipeline",
"previous_node_inputs": {
"register_openai_model": "model_id"
},
"user_inputs": {
"pipeline_id": "nlp-ingest-pipeline",
"configurations": {
"description": "A text embedding pipeline",
"processors": [
{
"text_embedding": {
"model_id": "${{register_openai_model.model_id}}",
"field_map": {
"passage_text": "passage_embedding"
}
}
}
]
}
}
},
{
"id": "create_index",
"type": "create_index",
"previous_node_inputs": {
"create_ingest_pipeline": "pipeline_id"
},
"user_inputs": {
"index_name": "my-nlp-index",
"configurations": {
"settings": {
"index.knn": true,
"default_pipeline": "${{create_ingest_pipeline.pipeline_id}}",
"index.number_of_shards": "2"
},
"mappings": {
"properties": {
"passage_embedding": {
"type": "knn_vector",
"dimension": "768",
"method": {
"engine": "lucene",
"space_type": "l2",
"name": "hnsw",
"parameters": {}
}
},
"passage_text": {
"type": "text"
}
}
}
}
}
}
]
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
{
"name": "semantic search with local pretrained model",
"description": "Setting up semantic search, with a local pretrained embedding model",
"use_case": "SEMANTIC_SEARCH",
"version": {
"template": "1.0.0",
"compatibility": [
"2.12.0",
"3.0.0"
]
},
"workflows": {
"provision": {
"nodes": [
{
"id": "create_openai_connector",
"type": "create_connector",
"user_inputs": {
"name": "OpenAI Chat Connector",
"description": "The connector to public OpenAI model service for text embedding model",
"version": "1",
"protocol": "http",
"parameters": {
"endpoint": "api.openai.com",
"model": "gpt-3.5-turbo",
"response_filter": "$.choices[0].message.content"
},
"credential": {
"openAI_key": "12345"
},
"actions": [
{
"action_type": "predict",
"method": "POST",
"url": "https://${parameters.endpoint}/v1/chat/completions"
}
]
}
},
{
"id": "register_openai_model",
"type": "register_remote_model",
"previous_node_inputs": {
"create_openai_connector": "connector_id"
},
"user_inputs": {
"name": "openAI-gpt-3.5-turbo",
"deploy": true
}
},
{
"id": "create_ingest_pipeline",
"type": "create_ingest_pipeline",
"previous_node_inputs": {
"register_openai_model": "model_id"
},
"user_inputs": {
"pipeline_id": "nlp-ingest-pipeline",
"configurations": {
"description": "A text embedding pipeline",
"processors": [
{
"text_embedding": {
"model_id": "${{register_openai_model.model_id}}",
"field_map": {
"passage_text": "passage_embedding"
}
}
}
]
}
}
},
{
"id": "create_index",
"type": "create_index",
"previous_node_inputs": {
"create_ingest_pipeline": "pipeline_id"
},
"user_inputs": {
"index_name": "my-nlp-index",
"configurations": {
"settings": {
"index.knn": true,
"default_pipeline": "_none",
"index.number_of_shards": "2"
},
"mappings": {
"properties": {
"passage_embedding": {
"type": "knn_vector",
"dimension": "768",
"method": {
"engine": "lucene",
"space_type": "l2",
"name": "hnsw",
"parameters": {}
}
},
"passage_text": {
"type": "text"
}
}
}
}
}
}
]
}
}
}
Loading
Loading