From ec1bd44b52d98acbe0a5bd2357b4c03e1e44ec80 Mon Sep 17 00:00:00 2001 From: Tyler Ohlsen Date: Mon, 18 Dec 2023 14:09:04 -0800 Subject: [PATCH 001/119] Include ML zip dependency in integTest and run tasks (#36) Signed-off-by: Tyler Ohlsen --- build.gradle | 180 +++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 152 insertions(+), 28 deletions(-) diff --git a/build.gradle b/build.gradle index 55aa878a..0d9b28ce 100644 --- a/build.gradle +++ b/build.gradle @@ -3,12 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ +import org.opensearch.gradle.test.RestIntegTestTask +import java.util.concurrent.Callable + buildscript { ext { opensearch_group = "org.opensearch" opensearch_version = System.getProperty("opensearch.version", "2.11.0-SNAPSHOT") isSnapshot = "true" == System.getProperty("build.snapshot", "true") buildVersionQualifier = System.getProperty("build.version_qualifier", "") + version_tokens = opensearch_version.tokenize('-') + opensearch_build = version_tokens[0] + '.0' + if (buildVersionQualifier) { + opensearch_build += "-${buildVersionQualifier}" + } + if (isSnapshot) { + opensearch_build += "-SNAPSHOT" + } } repositories { @@ -25,8 +36,12 @@ buildscript { plugins { id 'java-library' - id 'com.diffplug.spotless' version '6.22.0' - id "io.freefair.lombok" version "8.0.1" + id 'com.diffplug.spotless' version '6.23.0' + id "io.freefair.lombok" version "8.4" +} + +lombok { + version = "1.18.30" } repositories { @@ -61,31 +76,48 @@ apply plugin: 'opensearch.opensearchplugin' apply plugin: 'opensearch.testclusters' apply plugin: 'opensearch.pluginzip' +def sqlJarDirectory = "$buildDir/dependencies/opensearch-sql-plugin" configurations { zipArchive all { - resolutionStrategy.force "org.mockito:mockito-core:5.5.0" + resolutionStrategy { + force "org.mockito:mockito-core:5.8.0" + force "com.google.guava:guava:32.1.3-jre" // CVE for 31.1 + force("org.eclipse.platform:org.eclipse.core.runtime:3.30.0") // CVE for < 3.29.0, forces JDK17 for spotless + } 
} } -def sqlJarDirectory = "$buildDir/dependencies/opensearch-sql-plugin" +task addJarsToClasspath(type: Copy) { + from(fileTree(dir: sqlJarDirectory)) { + include "opensearch-sql-${version}.jar" + include "ppl-${version}.jar" + include "protocol-${version}.jar" + } + into("$buildDir/classes") +} dependencies { compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${version}" compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.10.1' - testImplementation "org.opensearch.test:framework:${opensearch_version}" - testImplementation "org.mockito:mockito-core:3.10.0" - testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.2' - testImplementation 'org.mockito:mockito-junit-jupiter:3.10.0' - testImplementation "com.nhaarman.mockitokotlin2:mockito-kotlin:2.2.0" - testImplementation "com.cronutils:cron-utils:9.1.6" - testImplementation "commons-validator:commons-validator:1.7" - testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.2' - compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.19.0" - compileOnly group: 'org.json', name: 'json', version: '20230227' + compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0" + compileOnly group: 'org.json', name: 'json', version: '20231013' zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" + implementation("com.google.guava:guava:32.1.3-jre") implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${version}.jar", "ppl-${version}.jar", "protocol-${version}.jar"]) + compileOnly "org.opensearch:common-utils:${version}" + testImplementation "org.opensearch.test:framework:${opensearch_version}" + testImplementation "org.mockito:mockito-core:5.8.0" + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.1' + testImplementation 'org.mockito:mockito-junit-jupiter:5.8.0' + testImplementation "com.nhaarman.mockitokotlin2:mockito-kotlin:2.2.0" + testImplementation "com.cronutils:cron-utils:9.2.1" + 
testImplementation "commons-validator:commons-validator:1.8.0" + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.10.1' + + // ZipArchive dependencies used for integration tests + zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${opensearch_build}" } task extractSqlJar(type: Copy) { @@ -94,13 +126,15 @@ task extractSqlJar(type: Copy) { into sqlJarDirectory } -project.tasks.delombok.dependsOn(extractSqlJar) +tasks.addJarsToClasspath.dependsOn(extractSqlJar) +project.tasks.delombok.dependsOn(addJarsToClasspath) tasks.publishNebulaPublicationToMavenLocal.dependsOn ':generatePomFileForPluginZipPublication' tasks.validateNebulaPom.dependsOn ':generatePomFileForPluginZipPublication' dependencyLicenses.enabled = false loggerUsageCheck.enabled = false testingConventions.enabled = false +thirdPartyAudit.enabled = false test { useJUnitPlatform() @@ -112,12 +146,16 @@ test { } spotless { - java { - removeUnusedImports() - importOrder 'java', 'javax', 'org', 'com' - licenseHeaderFile 'spotless.license.java' - - eclipse().configFile rootProject.file('.eclipseformat.xml') + if (JavaVersion.current() >= JavaVersion.VERSION_17) { + // Spotless configuration for Java files + java { + removeUnusedImports() + importOrder 'java', 'javax', 'org', 'com' + licenseHeaderFile 'spotless.license.java' + eclipse().configFile rootProject.file('.eclipseformat.xml') + } + } else { + logger.lifecycle("Spotless plugin requires Java 17 or higher. 
Skipping Spotless tasks.") } } @@ -133,9 +171,9 @@ compileTestJava { opensearchplugin { - name 'agent-tools' - description 'OpenSearch Agent Tools' - classname 'org.opensearch.agent_tool.ToolPlugin' + name 'skills' + description 'OpenSearch Skills' + classname 'org.opensearch.agent.ToolPlugin' extendedPlugins = ['opensearch-ml'] licenseFile rootProject.file("LICENSE.txt") noticeFile rootProject.file("NOTICE") @@ -159,8 +197,8 @@ publishing { publications { pluginZip(MavenPublication) { publication -> pom { - name = "OpenSearch Agent Tools" - description = "OpenSearch Agent Tools" + name = "OpenSearch Skills" + description = "OpenSearch Skills" groupId = "org.opensearch.plugin" licenses { license { @@ -171,7 +209,7 @@ publishing { developers { developer { name = "OpenSearch" - url = "https://github.com/opensearch-project/agent-tools" + url = "https://github.com/opensearch-project/skills" } } } @@ -181,6 +219,92 @@ publishing { gradle.startParameter.setLogLevel(LogLevel.DEBUG) } +def opensearch_tmp_dir = rootProject.file('build/private/opensearch_tmp').absoluteFile +opensearch_tmp_dir.mkdirs() +def _numNodes = findProperty('numNodes') as Integer ?: 1 + +// Set up integration tests +task integTest(type: RestIntegTestTask) { + description = "Run tests against a cluster" + testClassesDirs = sourceSets.test.output.classesDirs + classpath = sourceSets.test.runtimeClasspath +} +tasks.named("check").configure { dependsOn(integTest) } + +integTest { + + dependsOn "bundlePlugin" + systemProperty 'tests.security.manager', 'false' + systemProperty 'java.io.tmpdir', opensearch_tmp_dir.absolutePath + systemProperty('project.root', project.rootDir.absolutePath) + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + + // doFirst delays this block until execution time + doFirst { + // Tell the test JVM if the cluster JVM is running under a debugger so that tests can + 
// use longer timeouts for requests. + def isDebuggingCluster = getDebug() || System.getProperty("test.debug") != null + systemProperty 'cluster.debug', isDebuggingCluster + // Set number of nodes system property to be used in tests + systemProperty 'cluster.number_of_nodes', "${_numNodes}" + // There seems to be an issue when running multi node run or integ tasks with unicast_hosts + // not being written, the waitForAllConditions ensures it's written + getClusters().forEach { cluster -> + cluster.waitForAllConditions() + } + } + + // The --debug-jvm command-line option makes the cluster debuggable; this makes the tests debuggable + if (System.getProperty("test.debug") != null) { + jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' + } +} + +// Set up integration test clusters, installs all zipArchive dependencies and this plugin +testClusters.integTest { + testDistribution = "ARCHIVE" + + // Installs all registered zipArchive dependencies on integTest cluster nodes + configurations.zipArchive.asFileTree.each { + plugin(provider(new Callable(){ + @Override + RegularFile call() throws Exception { + return new RegularFile() { + @Override + File getAsFile() { + return it + } + } + } + })) + } + + // Install skills plugin on integTest cluster nodes + plugin(project.tasks.bundlePlugin.archiveFile) + + // Cluster shrink exception thrown if we try to set numberOfNodes to 1, so only apply if > 1 + if (_numNodes > 1) numberOfNodes = _numNodes + + // When running integration tests it doesn't forward the --debug-jvm to the cluster anymore + // i.e. 
we have to use a custom property to flag when we want to debug OpenSearch JVM + // since we also support multi node integration tests we increase debugPort per node + if (System.getProperty("opensearch.debug") != null) { + def debugPort = 5005 + nodes.forEach { node -> + node.jvmArgs("-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=*:${debugPort}") + debugPort += 1 + } + } +} + +// Automatically sets up the integration test cluster locally +run { + useCluster testClusters.integTest +} + // updateVersion: Task to auto increment to the next development iteration task updateVersion { onlyIf { System.getProperty('newVersion') } @@ -191,4 +315,4 @@ task updateVersion { // Include the required files that needs to be updated with new Version ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) } -} +} \ No newline at end of file From d588b78f0f556248e5259452ec362a6ecbf70f74 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 14:21:38 -0800 Subject: [PATCH 002/119] add security policy for accessDeclaredMembers (#35) (#37) (cherry picked from commit c037b2afeebc746e8bd0fe38c7e8b0f8622030f3) Signed-off-by: Joshua Li Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/plugin-metadata/plugin-security.policy | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 src/main/plugin-metadata/plugin-security.policy diff --git a/src/main/plugin-metadata/plugin-security.policy b/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 00000000..4c512a49 --- /dev/null +++ b/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,8 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +grant { + permission java.lang.RuntimePermission 
"accessDeclaredMembers"; +}; From ff17f3d56ff4ff1d027ba9964b1da57977bf3de7 Mon Sep 17 00:00:00 2001 From: zhichao-aws Date: Thu, 21 Dec 2023 14:52:48 +0800 Subject: [PATCH 003/119] fix version Signed-off-by: zhichao-aws --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 0d9b28ce..0056d791 100644 --- a/build.gradle +++ b/build.gradle @@ -9,7 +9,7 @@ import java.util.concurrent.Callable buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.11.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.12.0-SNAPSHOT") isSnapshot = "true" == System.getProperty("build.snapshot", "true") buildVersionQualifier = System.getProperty("build.version_qualifier", "") version_tokens = opensearch_version.tokenize('-') From c9310299bdfde00e9af060a8b9716e356e3ad0c0 Mon Sep 17 00:00:00 2001 From: Tyler Ohlsen Date: Fri, 22 Dec 2023 14:20:21 -0800 Subject: [PATCH 004/119] [Backport 2.x] Add basic search detectors tool; pull plugin deps in gradle run (#50) Signed-off-by: Tyler Ohlsen --- build.gradle | 67 ++++-- .../tools/SearchAnomalyDetectorsTool.java | 204 ++++++++++++++++++ .../SearchAnomalyDetectorsToolTests.java | 153 +++++++++++++ 3 files changed, 409 insertions(+), 15 deletions(-) create mode 100644 src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java create mode 100644 src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java diff --git a/build.gradle b/build.gradle index 0056d791..fba99bc2 100644 --- a/build.gradle +++ b/build.gradle @@ -12,14 +12,6 @@ buildscript { opensearch_version = System.getProperty("opensearch.version", "2.12.0-SNAPSHOT") isSnapshot = "true" == System.getProperty("build.snapshot", "true") buildVersionQualifier = System.getProperty("build.version_qualifier", "") - version_tokens = opensearch_version.tokenize('-') - opensearch_build = version_tokens[0] + '.0' - 
if (buildVersionQualifier) { - opensearch_build += "-${buildVersionQualifier}" - } - if (isSnapshot) { - opensearch_build += "-SNAPSHOT" - } } repositories { @@ -77,6 +69,8 @@ apply plugin: 'opensearch.testclusters' apply plugin: 'opensearch.pluginzip' def sqlJarDirectory = "$buildDir/dependencies/opensearch-sql-plugin" +def jsJarDirectory = "$buildDir/dependencies/opensearch-job-scheduler" +def adJarDirectory = "$buildDir/dependencies/opensearch-anomaly-detection" configurations { zipArchive @@ -96,28 +90,52 @@ task addJarsToClasspath(type: Copy) { include "protocol-${version}.jar" } into("$buildDir/classes") + + from(fileTree(dir: jsJarDirectory)) { + include "opensearch-job-scheduler-${version}.jar" + } + into("$buildDir/classes") + + from(fileTree(dir: adJarDirectory)) { + include "opensearch-anomaly-detection-${version}.jar" + } + into("$buildDir/classes") } dependencies { - compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${version}" + // 3P dependencies compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.10.1' compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0" compileOnly group: 'org.json', name: 'json', version: '20231013' - zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" implementation("com.google.guava:guava:32.1.3-jre") + implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.13.0' + + // Plugin dependencies + compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${version}" + implementation fileTree(dir: jsJarDirectory, include: ["opensearch-job-scheduler-${version}.jar"]) + implementation fileTree(dir: adJarDirectory, include: ["opensearch-anomaly-detection-${version}.jar"]) implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${version}.jar", "ppl-${version}.jar", "protocol-${version}.jar"]) compileOnly "org.opensearch:common-utils:${version}" + + // ZipArchive dependencies used for 
integration tests + zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${version}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${version}" + zipArchive "org.opensearch.plugin:opensearch-anomaly-detection:${version}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" + + // Test dependencies testImplementation "org.opensearch.test:framework:${opensearch_version}" - testImplementation "org.mockito:mockito-core:5.8.0" + testImplementation group: 'junit', name: 'junit', version: '4.13.2' + testImplementation group: 'org.mockito', name: 'mockito-core', version: '5.8.0' + testImplementation group: 'org.mockito', name: 'mockito-inline', version: '5.2.0' + testImplementation("net.bytebuddy:byte-buddy:1.14.7") + testImplementation("net.bytebuddy:byte-buddy-agent:1.14.7") testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.1' testImplementation 'org.mockito:mockito-junit-jupiter:5.8.0' testImplementation "com.nhaarman.mockitokotlin2:mockito-kotlin:2.2.0" testImplementation "com.cronutils:cron-utils:9.2.1" testImplementation "commons-validator:commons-validator:1.8.0" testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.10.1' - - // ZipArchive dependencies used for integration tests - zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${opensearch_build}" } task extractSqlJar(type: Copy) { @@ -126,7 +144,21 @@ task extractSqlJar(type: Copy) { into sqlJarDirectory } +task extractJsJar(type: Copy) { + mustRunAfter() + from(zipTree(configurations.zipArchive.find { it.name.startsWith("opensearch-job-scheduler")})) + into jsJarDirectory +} + +task extractAdJar(type: Copy) { + mustRunAfter() + from(zipTree(configurations.zipArchive.find { it.name.startsWith("opensearch-anomaly-detection")})) + into adJarDirectory +} + tasks.addJarsToClasspath.dependsOn(extractSqlJar) +tasks.addJarsToClasspath.dependsOn(extractJsJar) 
+tasks.addJarsToClasspath.dependsOn(extractAdJar) project.tasks.delombok.dependsOn(addJarsToClasspath) tasks.publishNebulaPublicationToMavenLocal.dependsOn ':generatePomFileForPluginZipPublication' tasks.validateNebulaPom.dependsOn ':generatePomFileForPluginZipPublication' @@ -137,12 +169,13 @@ testingConventions.enabled = false thirdPartyAudit.enabled = false test { - useJUnitPlatform() testLogging { exceptionFormat "full" events "skipped", "passed", "failed" // "started" showStandardStreams true } + include '**/*Tests.class' + systemProperty 'tests.security.manager', 'false' } spotless { @@ -161,6 +194,8 @@ spotless { compileJava { dependsOn extractSqlJar + dependsOn extractJsJar + dependsOn extractAdJar dependsOn delombok options.compilerArgs.addAll(["-processor", 'lombok.launch.AnnotationProcessorHider$AnnotationProcessor']) } @@ -169,6 +204,8 @@ compileTestJava { options.compilerArgs.addAll(["-processor", 'lombok.launch.AnnotationProcessorHider$AnnotationProcessor']) } +forbiddenApisTest.ignoreFailures = true + opensearchplugin { name 'skills' diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java new file mode 100644 index 00000000..357668c9 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -0,0 +1,204 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.ad.client.AnomalyDetectionNodeClient; +import org.opensearch.client.Client; +import org.opensearch.core.action.ActionListener; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import 
org.opensearch.index.query.RangeQueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.query.WildcardQueryBuilder; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Parser; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.sort.SortOrder; + +import lombok.Getter; +import lombok.Setter; + +@ToolAnnotation(SearchAnomalyDetectorsTool.TYPE) +public class SearchAnomalyDetectorsTool implements Tool { + public static final String TYPE = "SearchAnomalyDetectorsTool"; + private static final String DEFAULT_DESCRIPTION = "Use this tool to search anomaly detectors."; + + @Setter + @Getter + private String name = TYPE; + @Getter + @Setter + private String description = DEFAULT_DESCRIPTION; + + @Getter + private String version; + + private Client client; + + private AnomalyDetectionNodeClient adClient; + + @Setter + private Parser inputParser; + @Setter + private Parser outputParser; + + public SearchAnomalyDetectorsTool(Client client) { + this.client = client; + this.adClient = new AnomalyDetectionNodeClient(client); + + // probably keep this overridden output parser. need to ensure the output matches what's expected + outputParser = new Parser<>() { + @Override + public Object parse(Object o) { + @SuppressWarnings("unchecked") + List mlModelOutputs = (List) o; + return mlModelOutputs.get(0).getMlModelTensors().get(0).getDataAsMap().get("response"); + } + }; + } + + // Response is currently in a simple string format including the list of anomaly detectors (only name and ID attached), and + // number of total detectors. The output will likely need to be updated, standardized, and include more fields in the + // future to cover a sufficient amount of potential questions the agent will need to handle. 
+ @Override + public void run(Map parameters, ActionListener listener) { + final String detectorName = parameters.getOrDefault("detectorName", null); + final String detectorNamePattern = parameters.getOrDefault("detectorNamePattern", null); + final String indices = parameters.getOrDefault("indices", null); + final Boolean highCardinality = parameters.containsKey("highCardinality") + ? Boolean.parseBoolean(parameters.get("highCardinality")) + : null; + final Long lastUpdateTime = parameters.containsKey("lastUpdateTime") && StringUtils.isNumeric(parameters.get("lastUpdateTime")) + ? Long.parseLong(parameters.get("lastUpdateTime")) + : null; + final String sortOrderStr = parameters.getOrDefault("sortOrder", "asc"); + final SortOrder sortOrder = sortOrderStr.equalsIgnoreCase("asc") ? SortOrder.ASC : SortOrder.DESC; + final String sortString = parameters.getOrDefault("sortString", "name.keyword"); + final int size = parameters.containsKey("size") ? Integer.parseInt(parameters.get("size")) : 20; + final int startIndex = parameters.containsKey("startIndex") ? Integer.parseInt(parameters.get("startIndex")) : 0; + final Boolean running = parameters.containsKey("running") ? Boolean.parseBoolean(parameters.get("running")) : null; + final Boolean disabled = parameters.containsKey("disabled") ? Boolean.parseBoolean(parameters.get("disabled")) : null; + final Boolean failed = parameters.containsKey("failed") ? Boolean.parseBoolean(parameters.get("failed")) : null; + + List mustList = new ArrayList(); + if (detectorName != null) { + mustList.add(new TermQueryBuilder("name.keyword", detectorName)); + } + if (detectorNamePattern != null) { + mustList.add(new WildcardQueryBuilder("name.keyword", detectorNamePattern)); + } + if (indices != null) { + mustList.add(new TermQueryBuilder("indices", indices)); + } + if (highCardinality != null) { + mustList.add(new TermQueryBuilder("detector_type", highCardinality ? 
"MULTI_ENTITY" : "SINGLE_ENTITY")); + } + if (lastUpdateTime != null) { + mustList.add(new BoolQueryBuilder().filter(new RangeQueryBuilder("last_update_time").gte(lastUpdateTime))); + + } + + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must().addAll(mustList); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(boolQueryBuilder) + .size(size) + .from(startIndex) + .sort(sortString, sortOrder); + + SearchRequest searchDetectorRequest = new SearchRequest().source(searchSourceBuilder); + + if (running != null || disabled != null || failed != null) { + // TODO: add a listener to trigger when the first response is received, to trigger the profile API call + // to fetch the detector state, etc. + // Will need AD client to onboard the profile API first. + } + + ActionListener searchDetectorListener = ActionListener.wrap(response -> { + StringBuilder sb = new StringBuilder(); + SearchHit[] hits = response.getHits().getHits(); + sb.append("AnomalyDetectors=["); + for (SearchHit hit : hits) { + sb.append("{"); + sb.append("id=").append(hit.getId()).append(","); + sb.append("name=").append(hit.getSourceAsMap().get("name")); + sb.append("}"); + } + sb.append("]"); + sb.append("TotalAnomalyDetectors=").append(response.getHits().getTotalHits().value); + listener.onResponse((T) sb.toString()); + }, e -> { listener.onFailure(e); }); + + adClient.searchAnomalyDetectors(searchDetectorRequest, searchDetectorListener); + } + + @Override + public boolean validate(Map parameters) { + return true; + } + + @Override + public String getType() { + return TYPE; + } + + /** + * Factory for the {@link SearchAnomalyDetectorsTool} + */ + public static class Factory implements Tool.Factory { + private Client client; + + private AnomalyDetectionNodeClient adClient; + + private static Factory INSTANCE; + + /** + * Create or return the singleton factory instance + */ + public static Factory getInstance() { + if (INSTANCE != null) { + 
return INSTANCE; + } + synchronized (SearchAnomalyDetectorsTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new Factory(); + return INSTANCE; + } + } + + /** + * Initialize this factory + * @param client The OpenSearch client + */ + public void init(Client client) { + this.client = client; + this.adClient = new AnomalyDetectionNodeClient(client); + } + + @Override + public SearchAnomalyDetectorsTool create(Map map) { + return new SearchAnomalyDetectorsTool(client); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } + +} diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java new file mode 100644 index 00000000..37ff02a1 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -0,0 +1,153 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Locale; +import java.util.Map; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.ActionType; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchResponseSections; +import org.opensearch.client.AdminClient; +import org.opensearch.client.ClusterAdminClient; +import org.opensearch.client.IndicesAdminClient; +import 
org.opensearch.client.node.NodeClient; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.aggregations.Aggregations; + +public class SearchAnomalyDetectorsToolTests { + @Mock + private NodeClient nodeClient; + @Mock + private AdminClient adminClient; + @Mock + private IndicesAdminClient indicesAdminClient; + @Mock + private ClusterAdminClient clusterAdminClient; + + private Map nullParams; + private Map emptyParams; + private Map nonEmptyParams; + + @Before + public void setup() { + MockitoAnnotations.openMocks(this); + SearchAnomalyDetectorsTool.Factory.getInstance().init(nodeClient); + + nullParams = null; + emptyParams = Collections.emptyMap(); + nonEmptyParams = Map.of("detectorName", "foo"); + } + + @Test + public void testRunWithNoDetectors() throws Exception { + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + SearchHit[] hits = new SearchHit[0]; + + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + + SearchResponse getDetectorsResponse = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + String expectedResponseStr = String.format(Locale.getDefault(), "AnomalyDetectors=[]TotalAnomalyDetectors=%d", hits.length); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getDetectorsResponse); + return null; + 
}).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(emptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithSingleAnomalyDetector() throws Exception { + final String detectorName = "detector-1"; + final String detectorId = "detector-1-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); + content.startObject(); + content.field("name", detectorName); + content.endObject(); + SearchHit[] hits = new SearchHit[1]; + hits[0] = new SearchHit(0, detectorId, null, null).sourceRef(BytesReference.bytes(content)); + + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + + SearchResponse getDetectorsResponse = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + String expectedResponseStr = String + .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getDetectorsResponse); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(emptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testValidate() { + Tool tool = 
SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(SearchAnomalyDetectorsTool.TYPE, tool.getType()); + assertTrue(tool.validate(emptyParams)); + assertTrue(tool.validate(nonEmptyParams)); + assertTrue(tool.validate(nullParams)); + } +} From c6e18c615827725f334e48b58d657eacc71c529f Mon Sep 17 00:00:00 2001 From: zhichao-aws Date: Mon, 25 Dec 2023 15:43:58 +0800 Subject: [PATCH 005/119] [Backport 2.x] feature: Add AbstractRetriverTool, VectorDBTool, NeuralSparseTools (#58) * Merge pull request #40 from zhichao-aws/SearchTools feature: Add AbstractRetriverTool, VectorDBTool, NeuralSparseTools (cherry picked from commit c088f7793c3e93ba0488b0be8bacc49c7195682a) * fix commons-lang3 version (#45) (#59) Signed-off-by: zhichao-aws --------- Signed-off-by: zhichao-aws Co-authored-by: zane-neo --- build.gradle | 10 +- .../java/org/opensearch/agent/ToolPlugin.java | 6 +- .../agent/tools/AbstractRetrieverTool.java | 140 ++++++++++++++ .../agent/tools/NeuralSparseSearchTool.java | 110 +++++++++++ .../opensearch/agent/tools/VectorDBTool.java | 119 ++++++++++++ .../tools/AbstractRetrieverToolTests.java | 183 ++++++++++++++++++ .../tools/NeuralSparseSearchToolTests.java | 116 +++++++++++ .../agent/tools/VectorDBToolTests.java | 103 ++++++++++ .../retrieval_tool_empty_search_response.json | 18 ++ .../tools/retrieval_tool_search_response.json | 35 ++++ 10 files changed, 834 insertions(+), 6 deletions(-) create mode 100644 src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java create mode 100644 src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java create mode 100644 src/main/java/org/opensearch/agent/tools/VectorDBTool.java create mode 100644 src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java create mode 100644 src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java create mode 100644 src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java create mode 100644 
src/test/resources/org/opensearch/agent/tools/retrieval_tool_empty_search_response.json create mode 100644 src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json diff --git a/build.gradle b/build.gradle index fba99bc2..49411109 100644 --- a/build.gradle +++ b/build.gradle @@ -76,7 +76,7 @@ configurations { zipArchive all { resolutionStrategy { - force "org.mockito:mockito-core:5.8.0" + force "org.mockito:mockito-core:${versions.mockito}" force "com.google.guava:guava:32.1.3-jre" // CVE for 31.1 force("org.eclipse.platform:org.eclipse.core.runtime:3.30.0") // CVE for < 3.29.0, forces JDK17 for spotless } @@ -107,8 +107,8 @@ dependencies { compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.10.1' compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0" compileOnly group: 'org.json', name: 'json', version: '20231013' - implementation("com.google.guava:guava:32.1.3-jre") - implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.13.0' + compileOnly("com.google.guava:guava:32.1.3-jre") + compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.10' // Plugin dependencies compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${version}" @@ -128,8 +128,8 @@ dependencies { testImplementation group: 'junit', name: 'junit', version: '4.13.2' testImplementation group: 'org.mockito', name: 'mockito-core', version: '5.8.0' testImplementation group: 'org.mockito', name: 'mockito-inline', version: '5.2.0' - testImplementation("net.bytebuddy:byte-buddy:1.14.7") - testImplementation("net.bytebuddy:byte-buddy-agent:1.14.7") + testImplementation("net.bytebuddy:byte-buddy:1.14.7") + testImplementation("net.bytebuddy:byte-buddy-agent:1.14.7") testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.1' testImplementation 'org.mockito:mockito-junit-jupiter:5.8.0' testImplementation "com.nhaarman.mockitokotlin2:mockito-kotlin:2.2.0" diff --git 
a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index eba2f6a1..8e3d0844 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -10,7 +10,9 @@ import java.util.List; import java.util.function.Supplier; +import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; +import org.opensearch.agent.tools.VectorDBTool; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; @@ -54,11 +56,13 @@ public Collection createComponents( this.xContentRegistry = xContentRegistry; PPLTool.Factory.getInstance().init(client); + NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); + VectorDBTool.Factory.getInstance().init(client, xContentRegistry); return Collections.emptyList(); } @Override public List> getToolFactories() { - return List.of(PPLTool.Factory.getInstance()); + return List.of(PPLTool.Factory.getInstance(), NeuralSparseSearchTool.Factory.getInstance(), VectorDBTool.Factory.getInstance()); } } diff --git a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java new file mode 100644 index 00000000..dba48070 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java @@ -0,0 +1,140 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import 
org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * Abstract tool supports search paradigms in neural-search plugin. + */ +@Log4j2 +@Getter +@Setter +public abstract class AbstractRetrieverTool implements Tool { + public static final String DEFAULT_DESCRIPTION = "Use this tool to search data in OpenSearch index."; + public static final String INPUT_FIELD = "input"; + public static final String INDEX_FIELD = "index"; + public static final String SOURCE_FIELD = "source_field"; + public static final String DOC_SIZE_FIELD = "doc_size"; + public static final int DEFAULT_DOC_SIZE = 2; + + protected String description = DEFAULT_DESCRIPTION; + protected Client client; + protected NamedXContentRegistry xContentRegistry; + protected String index; + protected String[] sourceFields; + protected Integer docSize; + protected String version; + + protected AbstractRetrieverTool( + Client client, + NamedXContentRegistry xContentRegistry, + String index, + String[] sourceFields, + Integer docSize + ) { + this.client = client; + this.xContentRegistry = xContentRegistry; + this.index = index; + this.sourceFields = sourceFields; + this.docSize = docSize == null ? 
DEFAULT_DOC_SIZE : docSize; + } + + protected abstract String getQueryBody(String queryText); + + private SearchRequest buildSearchRequest(Map parameters) throws IOException { + String question = parameters.get(INPUT_FIELD); + if (StringUtils.isBlank(question)) { + throw new IllegalArgumentException("[" + INPUT_FIELD + "] is null or empty, can not process it."); + } + + String query = getQueryBody(question); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + XContentParser queryParser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, query); + searchSourceBuilder.parseXContent(queryParser); + searchSourceBuilder.fetchSource(sourceFields, null); + searchSourceBuilder.size(docSize); + SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder).indices(index); + return searchRequest; + } + + @Override + public void run(Map parameters, ActionListener listener) { + SearchRequest searchRequest; + try { + searchRequest = buildSearchRequest(parameters); + } catch (Exception e) { + log.error("Failed to build search request.", e); + listener.onFailure(e); + return; + } + + ActionListener actionListener = ActionListener.wrap(r -> { + SearchHit[] hits = r.getHits().getHits(); + + if (hits != null && hits.length > 0) { + StringBuilder contextBuilder = new StringBuilder(); + for (int i = 0; i < hits.length; i++) { + SearchHit hit = hits[i]; + Map docContent = new HashMap<>(); + docContent.put("_index", hit.getIndex()); + docContent.put("_id", hit.getId()); + docContent.put("_score", hit.getScore()); + docContent.put("_source", hit.getSourceAsMap()); + contextBuilder.append(gson.toJson(docContent)).append("\n"); + } + listener.onResponse((T) contextBuilder.toString()); + } else { + listener.onResponse((T) "Can not get any match from search result."); + } + }, e -> { + log.error("Failed to search index.", e); + listener.onFailure(e); + }); + client.search(searchRequest, actionListener); + } + + 
@Override + public boolean validate(Map parameters) { + return parameters != null && parameters.size() > 0 && !StringUtils.isBlank(parameters.get("input")); + } + + protected static abstract class Factory implements Tool.Factory { + protected Client client; + protected NamedXContentRegistry xContentRegistry; + + public void init(Client client, NamedXContentRegistry xContentRegistry) { + this.client = client; + this.xContentRegistry = xContentRegistry; + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } +} diff --git a/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java new file mode 100644 index 00000000..40c57aba --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java @@ -0,0 +1,110 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.client.Client; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; + +import lombok.Builder; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * This tool supports neural_sparse search with sparse encoding models and rank_features field. 
+ */ +@Log4j2 +@Getter +@Setter +@ToolAnnotation(NeuralSparseSearchTool.TYPE) +public class NeuralSparseSearchTool extends AbstractRetrieverTool { + public static final String TYPE = "NeuralSparseSearchTool"; + public static final String MODEL_ID_FIELD = "model_id"; + public static final String EMBEDDING_FIELD = "embedding_field"; + + private String name = TYPE; + private String modelId; + private String embeddingField; + + @Builder + public NeuralSparseSearchTool( + Client client, + NamedXContentRegistry xContentRegistry, + String index, + String embeddingField, + String[] sourceFields, + Integer docSize, + String modelId + ) { + super(client, xContentRegistry, index, sourceFields, docSize); + this.modelId = modelId; + this.embeddingField = embeddingField; + } + + @Override + protected String getQueryBody(String queryText) { + if (StringUtils.isBlank(embeddingField) || StringUtils.isBlank(modelId)) { + throw new IllegalArgumentException( + "Parameter [" + EMBEDDING_FIELD + "] and [" + MODEL_ID_FIELD + "] can not be null or empty." 
+ ); + } + return "{\"query\":{\"neural_sparse\":{\"" + + embeddingField + + "\":{\"query_text\":\"" + + queryText + + "\",\"model_id\":\"" + + modelId + + "\"}}}" + + " }"; + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory extends AbstractRetrieverTool.Factory { + private static Factory INSTANCE; + + public static Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (NeuralSparseSearchTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new Factory(); + return INSTANCE; + } + } + + @Override + public NeuralSparseSearchTool create(Map params) { + String index = (String) params.get(INDEX_FIELD); + String embeddingField = (String) params.get(EMBEDDING_FIELD); + String[] sourceFields = gson.fromJson((String) params.get(SOURCE_FIELD), String[].class); + String modelId = (String) params.get(MODEL_ID_FIELD); + Integer docSize = params.containsKey(DOC_SIZE_FIELD) ? Integer.parseInt((String) params.get(DOC_SIZE_FIELD)) : DEFAULT_DOC_SIZE; + return NeuralSparseSearchTool + .builder() + .client(client) + .xContentRegistry(xContentRegistry) + .index(index) + .embeddingField(embeddingField) + .sourceFields(sourceFields) + .modelId(modelId) + .docSize(docSize) + .build(); + } + } +} diff --git a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java new file mode 100644 index 00000000..428b9f14 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java @@ -0,0 +1,119 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.client.Client; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; + 
+import lombok.Builder; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * This tool supports neural search with embedding models and knn index. + */ +@Log4j2 +@Getter +@Setter +@ToolAnnotation(VectorDBTool.TYPE) +public class VectorDBTool extends AbstractRetrieverTool { + public static final String TYPE = "VectorDBTool"; + public static final String MODEL_ID_FIELD = "model_id"; + public static final String EMBEDDING_FIELD = "embedding_field"; + public static final String K_FIELD = "k"; + public static final Integer DEFAULT_K = 10; + + private String name = TYPE; + private String modelId; + private String embeddingField; + private Integer k; + + @Builder + public VectorDBTool( + Client client, + NamedXContentRegistry xContentRegistry, + String index, + String embeddingField, + String[] sourceFields, + Integer docSize, + String modelId, + Integer k + ) { + super(client, xContentRegistry, index, sourceFields, docSize); + this.modelId = modelId; + this.embeddingField = embeddingField; + this.k = k; + } + + @Override + protected String getQueryBody(String queryText) { + if (StringUtils.isBlank(embeddingField) || StringUtils.isBlank(modelId)) { + throw new IllegalArgumentException( + "Parameter [" + EMBEDDING_FIELD + "] and [" + MODEL_ID_FIELD + "] can not be null or empty." 
+ ); + } + return "{\"query\":{\"neural\":{\"" + + embeddingField + + "\":{\"query_text\":\"" + + queryText + + "\",\"model_id\":\"" + + modelId + + "\",\"k\":" + + k + + "}}}" + + " }"; + } + + @Override + public String getType() { + return TYPE; + } + + public static class Factory extends AbstractRetrieverTool.Factory { + private static VectorDBTool.Factory INSTANCE; + + public static VectorDBTool.Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (VectorDBTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new VectorDBTool.Factory(); + return INSTANCE; + } + } + + @Override + public VectorDBTool create(Map params) { + String index = (String) params.get(INDEX_FIELD); + String embeddingField = (String) params.get(EMBEDDING_FIELD); + String[] sourceFields = gson.fromJson((String) params.get(SOURCE_FIELD), String[].class); + String modelId = (String) params.get(MODEL_ID_FIELD); + Integer docSize = params.containsKey(DOC_SIZE_FIELD) ? Integer.parseInt((String) params.get(DOC_SIZE_FIELD)) : DEFAULT_DOC_SIZE; + Integer k = params.containsKey(K_FIELD) ? 
Integer.parseInt((String) params.get(K_FIELD)) : DEFAULT_K; + return VectorDBTool + .builder() + .client(client) + .xContentRegistry(xContentRegistry) + .index(index) + .embeddingField(embeddingField) + .sourceFields(sourceFields) + .modelId(modelId) + .docSize(docSize) + .k(k) + .build(); + } + } +} diff --git a/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java new file mode 100644 index 00000000..5e0faa9c --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java @@ -0,0 +1,183 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.search.SearchModule; + +import lombok.SneakyThrows; + +public class AbstractRetrieverToolTests { + static public final String TEST_QUERY = "{\"query\":{\"match_all\":{}}}"; + 
static public final String TEST_INDEX = "test index"; + static public final String[] TEST_SOURCE_FIELDS = new String[] { "test 1", "test 2" }; + static public final Integer TEST_DOC_SIZE = 3; + static public final NamedXContentRegistry TEST_XCONTENT_REGISTRY_FOR_QUERY = new NamedXContentRegistry( + new SearchModule(Settings.EMPTY, List.of()).getNamedXContents() + ); + + private String mockedSearchResponseString; + private String mockedEmptySearchResponseString; + private AbstractRetrieverTool mockedImpl; + + @Before + @SneakyThrows + public void setup() { + try (InputStream searchResponseIns = AbstractRetrieverTool.class.getResourceAsStream("retrieval_tool_search_response.json")) { + if (searchResponseIns != null) { + mockedSearchResponseString = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + } + } + try (InputStream searchResponseIns = AbstractRetrieverTool.class.getResourceAsStream("retrieval_tool_empty_search_response.json")) { + if (searchResponseIns != null) { + mockedEmptySearchResponseString = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + } + } + + mockedImpl = Mockito + .mock( + AbstractRetrieverTool.class, + Mockito + .withSettings() + .useConstructor(null, TEST_XCONTENT_REGISTRY_FOR_QUERY, TEST_INDEX, TEST_SOURCE_FIELDS, TEST_DOC_SIZE) + .defaultAnswer(Mockito.CALLS_REAL_METHODS) + ); + when(mockedImpl.getQueryBody(any(String.class))).thenReturn(TEST_QUERY); + } + + @Test + @SneakyThrows + public void testRunAsyncWithSearchResults() { + Client client = mock(Client.class); + SearchResponse mockedSearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedSearchResponseString) + ); + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + 
listener.onResponse(mockedSearchResponse); + return null; + }).when(client).search(any(), any()); + mockedImpl.setClient(client); + + final CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(r -> { future.complete(r); }, e -> { future.completeExceptionally(e); }); + + mockedImpl.run(Map.of(AbstractRetrieverTool.INPUT_FIELD, "hello world"), listener); + + future.join(); + assertEquals( + "{\"_index\":\"hybrid-index\",\"_source\":{\"passage_text\":\"Company test_mock have a history of 100 years.\"},\"_id\":\"1\",\"_score\":89.2917}\n" + + "{\"_index\":\"hybrid-index\",\"_source\":{\"passage_text\":\"the price of the api is 2$ per invokation\"},\"_id\":\"2\",\"_score\":0.10702579}\n", + future.get() + ); + } + + @Test + @SneakyThrows + public void testRunAsyncWithEmptySearchResponse() { + Client client = mock(Client.class); + SearchResponse mockedEmptySearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedEmptySearchResponseString) + ); + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedEmptySearchResponse); + return null; + }).when(client).search(any(), any()); + mockedImpl.setClient(client); + + final CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(r -> { future.complete(r); }, e -> { future.completeExceptionally(e); }); + + mockedImpl.run(Map.of(AbstractRetrieverTool.INPUT_FIELD, "hello world"), listener); + + future.join(); + assertEquals("Can not get any match from search result.", future.get()); + } + + @Test + @SneakyThrows + public void testRunAsyncWithIllegalQueryThenListenerOnFailure() { + Client client = mock(Client.class); + 
mockedImpl.setClient(client); + + final CompletableFuture future1 = new CompletableFuture<>(); + ActionListener listener1 = ActionListener.wrap(future1::complete, future1::completeExceptionally); + mockedImpl.run(Map.of(AbstractRetrieverTool.INPUT_FIELD, ""), listener1); + + Exception exception1 = assertThrows(Exception.class, future1::join); + assertTrue(exception1.getCause() instanceof IllegalArgumentException); + assertEquals(exception1.getCause().getMessage(), "[input] is null or empty, can not process it."); + + final CompletableFuture future2 = new CompletableFuture<>(); + ActionListener listener2 = ActionListener.wrap(future2::complete, future2::completeExceptionally); + mockedImpl.run(Map.of(AbstractRetrieverTool.INPUT_FIELD, " "), listener2); + + Exception exception2 = assertThrows(Exception.class, future2::join); + assertTrue(exception2.getCause() instanceof IllegalArgumentException); + assertEquals(exception2.getCause().getMessage(), "[input] is null or empty, can not process it."); + + final CompletableFuture future3 = new CompletableFuture<>(); + ActionListener listener3 = ActionListener.wrap(future3::complete, future3::completeExceptionally); + mockedImpl.run(Map.of("test", "hello world"), listener3); + + Exception exception3 = assertThrows(Exception.class, future3::join); + assertTrue(exception3.getCause() instanceof IllegalArgumentException); + assertEquals(exception3.getCause().getMessage(), "[input] is null or empty, can not process it."); + + final CompletableFuture future4 = new CompletableFuture<>(); + ActionListener listener4 = ActionListener.wrap(future4::complete, future4::completeExceptionally); + mockedImpl.run(null, listener4); + + Exception exception4 = assertThrows(Exception.class, future4::join); + assertTrue(exception4.getCause() instanceof NullPointerException); + } + + @Test + @SneakyThrows + public void testValidate() { + assertTrue(mockedImpl.validate(Map.of(AbstractRetrieverTool.INPUT_FIELD, "hi"))); + 
assertFalse(mockedImpl.validate(Map.of(AbstractRetrieverTool.INPUT_FIELD, ""))); + assertFalse(mockedImpl.validate(Map.of(AbstractRetrieverTool.INPUT_FIELD, " "))); + assertFalse(mockedImpl.validate(Map.of("test", " "))); + assertFalse(mockedImpl.validate(new HashMap<>())); + assertFalse(mockedImpl.validate(null)); + } +} diff --git a/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java b/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java new file mode 100644 index 00000000..fac45f54 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java @@ -0,0 +1,116 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; + +import com.google.gson.JsonSyntaxException; + +import lombok.SneakyThrows; + +public class NeuralSparseSearchToolTests { + public static final String TEST_QUERY_TEXT = "123fsd23134sdfouh"; + public static final String TEST_EMBEDDING_FIELD = "test embedding"; + public static final String TEST_MODEL_ID = "123fsd23134"; + private Map params = new HashMap<>(); + + @Before + public void setup() { + params.put(NeuralSparseSearchTool.INDEX_FIELD, AbstractRetrieverToolTests.TEST_INDEX); + params.put(NeuralSparseSearchTool.EMBEDDING_FIELD, TEST_EMBEDDING_FIELD); + params.put(NeuralSparseSearchTool.SOURCE_FIELD, gson.toJson(AbstractRetrieverToolTests.TEST_SOURCE_FIELDS)); + params.put(NeuralSparseSearchTool.MODEL_ID_FIELD, TEST_MODEL_ID); + params.put(NeuralSparseSearchTool.DOC_SIZE_FIELD, AbstractRetrieverToolTests.TEST_DOC_SIZE.toString()); + } + + @Test + @SneakyThrows + public void testCreateTool() { + NeuralSparseSearchTool tool = 
NeuralSparseSearchTool.Factory.getInstance().create(params); + assertEquals(AbstractRetrieverToolTests.TEST_INDEX, tool.getIndex()); + assertEquals(TEST_EMBEDDING_FIELD, tool.getEmbeddingField()); + assertEquals(AbstractRetrieverToolTests.TEST_SOURCE_FIELDS, tool.getSourceFields()); + assertEquals(TEST_MODEL_ID, tool.getModelId()); + assertEquals(AbstractRetrieverToolTests.TEST_DOC_SIZE, tool.getDocSize()); + assertEquals("NeuralSparseSearchTool", tool.getType()); + assertEquals("NeuralSparseSearchTool", tool.getName()); + assertEquals( + "Use this tool to search data in OpenSearch index.", + NeuralSparseSearchTool.Factory.getInstance().getDefaultDescription() + ); + } + + @Test + @SneakyThrows + public void testGetQueryBody() { + NeuralSparseSearchTool tool = NeuralSparseSearchTool.Factory.getInstance().create(params); + assertEquals( + "{\"query\":{\"neural_sparse\":{\"test embedding\":{\"" + + "query_text\":\"123fsd23134sdfouh\",\"model_id\":\"123fsd23134\"}}} }", + tool.getQueryBody(TEST_QUERY_TEXT) + ); + } + + @Test + @SneakyThrows + public void testGetQueryBodyWithIllegalParams() { + Map illegalParams1 = new HashMap<>(params); + illegalParams1.remove(NeuralSparseSearchTool.MODEL_ID_FIELD); + NeuralSparseSearchTool tool1 = NeuralSparseSearchTool.Factory.getInstance().create(illegalParams1); + Exception exception1 = assertThrows( + IllegalArgumentException.class, + () -> tool1.getQueryBody(AbstractRetrieverToolTests.TEST_QUERY) + ); + assertEquals("Parameter [embedding_field] and [model_id] can not be null or empty.", exception1.getMessage()); + + Map illegalParams2 = new HashMap<>(params); + illegalParams2.remove(NeuralSparseSearchTool.EMBEDDING_FIELD); + NeuralSparseSearchTool tool2 = NeuralSparseSearchTool.Factory.getInstance().create(illegalParams2); + Exception exception2 = assertThrows( + IllegalArgumentException.class, + () -> tool2.getQueryBody(AbstractRetrieverToolTests.TEST_QUERY) + ); + assertEquals("Parameter [embedding_field] and [model_id] can 
not be null or empty.", exception2.getMessage()); + } + + @Test + @SneakyThrows + public void testCreateToolsParseParams() { + assertThrows( + ClassCastException.class, + () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.INDEX_FIELD, 123)) + ); + + assertThrows( + ClassCastException.class, + () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.EMBEDDING_FIELD, 123)) + ); + + assertThrows( + ClassCastException.class, + () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.MODEL_ID_FIELD, 123)) + ); + + assertThrows( + JsonSyntaxException.class, + () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.SOURCE_FIELD, "123")) + ); + + // although it will be parsed as integer, but the parameters value should always be String + assertThrows( + ClassCastException.class, + () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.DOC_SIZE_FIELD, 123)) + ); + } +} diff --git a/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java new file mode 100644 index 00000000..cc67604f --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java @@ -0,0 +1,103 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; + +import com.google.gson.JsonSyntaxException; + +import lombok.SneakyThrows; + +public class VectorDBToolTests { + public static final String TEST_QUERY_TEXT = "123fsd23134sdfouh"; + public static final String TEST_EMBEDDING_FIELD = "test embedding"; + public static final String 
TEST_MODEL_ID = "123fsd23134"; + public static final Integer TEST_K = 123; + private Map params = new HashMap<>(); + + @Before + public void setup() { + params.put(VectorDBTool.INDEX_FIELD, AbstractRetrieverToolTests.TEST_INDEX); + params.put(VectorDBTool.EMBEDDING_FIELD, TEST_EMBEDDING_FIELD); + params.put(VectorDBTool.SOURCE_FIELD, gson.toJson(AbstractRetrieverToolTests.TEST_SOURCE_FIELDS)); + params.put(VectorDBTool.MODEL_ID_FIELD, TEST_MODEL_ID); + params.put(VectorDBTool.DOC_SIZE_FIELD, AbstractRetrieverToolTests.TEST_DOC_SIZE.toString()); + params.put(VectorDBTool.K_FIELD, TEST_K.toString()); + } + + @Test + @SneakyThrows + public void testCreateTool() { + VectorDBTool tool = VectorDBTool.Factory.getInstance().create(params); + assertEquals(AbstractRetrieverToolTests.TEST_INDEX, tool.getIndex()); + assertEquals(TEST_EMBEDDING_FIELD, tool.getEmbeddingField()); + assertEquals(AbstractRetrieverToolTests.TEST_SOURCE_FIELDS, tool.getSourceFields()); + assertEquals(TEST_MODEL_ID, tool.getModelId()); + assertEquals(AbstractRetrieverToolTests.TEST_DOC_SIZE, tool.getDocSize()); + assertEquals(TEST_K, tool.getK()); + assertEquals("VectorDBTool", tool.getType()); + assertEquals("VectorDBTool", tool.getName()); + assertEquals("Use this tool to search data in OpenSearch index.", VectorDBTool.Factory.getInstance().getDefaultDescription()); + } + + @Test + @SneakyThrows + public void testGetQueryBody() { + VectorDBTool tool = VectorDBTool.Factory.getInstance().create(params); + assertEquals( + "{\"query\":{\"neural\":{\"test embedding\":{\"" + + "query_text\":\"123fsd23134sdfouh\",\"model_id\":\"123fsd23134\",\"k\":123}}} }", + tool.getQueryBody(TEST_QUERY_TEXT) + ); + } + + @Test + @SneakyThrows + public void testGetQueryBodyWithIllegalParams() { + Map illegalParams1 = new HashMap<>(params); + illegalParams1.remove(VectorDBTool.MODEL_ID_FIELD); + VectorDBTool tool1 = VectorDBTool.Factory.getInstance().create(illegalParams1); + Exception exception1 = assertThrows( + 
IllegalArgumentException.class, + () -> tool1.getQueryBody(AbstractRetrieverToolTests.TEST_QUERY) + ); + assertEquals("Parameter [embedding_field] and [model_id] can not be null or empty.", exception1.getMessage()); + + Map illegalParams2 = new HashMap<>(params); + illegalParams2.remove(VectorDBTool.EMBEDDING_FIELD); + VectorDBTool tool2 = VectorDBTool.Factory.getInstance().create(illegalParams2); + Exception exception2 = assertThrows( + IllegalArgumentException.class, + () -> tool2.getQueryBody(AbstractRetrieverToolTests.TEST_QUERY) + ); + assertEquals("Parameter [embedding_field] and [model_id] can not be null or empty.", exception2.getMessage()); + } + + @Test + @SneakyThrows + public void testCreateToolsParseParams() { + assertThrows(ClassCastException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.INDEX_FIELD, 123))); + + assertThrows(ClassCastException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.EMBEDDING_FIELD, 123))); + + assertThrows(ClassCastException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.MODEL_ID_FIELD, 123))); + + assertThrows(JsonSyntaxException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.SOURCE_FIELD, "123"))); + + // although it will be parsed as integer, but the parameters value should always be String + assertThrows(ClassCastException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.DOC_SIZE_FIELD, 123))); + + assertThrows(ClassCastException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.K_FIELD, 123))); + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/retrieval_tool_empty_search_response.json b/src/test/resources/org/opensearch/agent/tools/retrieval_tool_empty_search_response.json new file mode 100644 index 00000000..7ca6bfa7 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/retrieval_tool_empty_search_response.json @@ -0,0 +1,18 @@ +{ + 
"took": 4, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 0, + "relation": "eq" + }, + "max_score": null, + "hits": [] + } +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json b/src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json new file mode 100644 index 00000000..7e66dd60 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json @@ -0,0 +1,35 @@ +{ + "took": 201, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 2, + "relation": "eq" + }, + "max_score": 89.2917, + "hits": [ + { + "_index": "hybrid-index", + "_id": "1", + "_score": 89.2917, + "_source": { + "passage_text": "Company test_mock have a history of 100 years." + } + }, + { + "_index": "hybrid-index", + "_id": "2", + "_score": 0.10702579, + "_source": { + "passage_text": "the price of the api is 2$ per invokation" + } + } + ] + } +} \ No newline at end of file From e75e0823b9b0bdda927b6ffee07e257b481ee678 Mon Sep 17 00:00:00 2001 From: xinyual <74362153+xinyual@users.noreply.github.com> Date: Tue, 26 Dec 2023 17:09:17 +0800 Subject: [PATCH 006/119] add ut and parser (#64) Signed-off-by: xinyual --- build.gradle | 1 + .../org/opensearch/agent/tools/PPLTool.java | 82 +++-- .../opensearch/agent/tools/PPLToolTests.java | 279 ++++++++++++++++++ 3 files changed, 339 insertions(+), 23 deletions(-) create mode 100644 src/test/java/org/opensearch/agent/tools/PPLToolTests.java diff --git a/build.gradle b/build.gradle index 49411109..48bf0ecd 100644 --- a/build.gradle +++ b/build.gradle @@ -109,6 +109,7 @@ dependencies { compileOnly group: 'org.json', name: 'json', version: '20231013' compileOnly("com.google.guava:guava:32.1.3-jre") compileOnly group: 'org.apache.commons', name: 
'commons-lang3', version: '3.10' + compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.10.0' // Plugin dependencies compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${version}" diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 052c6d3c..6cedd08d 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -5,19 +5,20 @@ package org.opensearch.agent.tools; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.UncheckedIOException; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.Locale; +import java.util.List; import java.util.Map; import java.util.StringJoiner; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.text.StringSubstitutor; import org.json.JSONObject; import org.opensearch.action.ActionRequest; import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -28,9 +29,6 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.core.action.ActionListener; import org.opensearch.core.action.ActionResponse; -import org.opensearch.core.common.io.stream.InputStreamStreamInput; -import org.opensearch.core.common.io.stream.OutputStreamStreamOutput; -import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.ml.common.FunctionName; import org.opensearch.ml.common.dataset.remote.RemoteInferenceInputDataSet; @@ -58,6 +56,8 @@ import lombok.extern.log4j.Log4j2; @Log4j2 +@Setter +@Getter @ToolAnnotation(PPLTool.TYPE) public 
class PPLTool implements Tool { @@ -93,6 +93,9 @@ public PPLTool(Client client, String modelId, String contextPrompt) { public void run(Map parameters, ActionListener listener) { String indexName = parameters.get("index"); String question = parameters.get("question"); + if (StringUtils.isBlank(indexName) || StringUtils.isBlank(question)) { + throw new IllegalArgumentException("Parameter index and question can not be null or empty."); + } SearchRequest searchRequest = buildSearchRequest(indexName); GetMappingsRequest getMappingsRequest = buildGetMappingRequest(indexName); client.admin().indices().getMappings(getMappingsRequest, ActionListener.wrap(getMappingsResponse -> { @@ -114,7 +117,7 @@ public void run(Map parameters, ActionListener listener) ModelTensors modelTensors = modelTensorOutput.getMlModelOutputs().get(0); ModelTensor modelTensor = modelTensors.getMlModelTensors().get(0); Map dataAsMap = (Map) modelTensor.getDataAsMap(); - String ppl = dataAsMap.get("output"); + String ppl = parseOutput(dataAsMap.get("response"), indexName); JSONObject jsonContent = new JSONObject(ImmutableMap.of("query", ppl)); PPLQueryRequest pplQueryRequest = new PPLQueryRequest(ppl, jsonContent, null, "jdbc"); TransportPPLQueryRequest transportPPLQueryRequest = new TransportPPLQueryRequest(pplQueryRequest); @@ -226,6 +229,8 @@ private String constructTableInfo(SearchHit[] searchHits, Map fieldsToType = new HashMap<>(); extractNamesTypes(mappingSource, fieldsToType, ""); StringJoiner tableInfoJoiner = new StringJoiner("\n"); + List sortedKeys = new ArrayList<>(fieldsToType.keySet()); + Collections.sort(sortedKeys); if (searchHits.length > 0) { SearchHit hit = searchHits[0]; @@ -236,12 +241,12 @@ private String constructTableInfo(SearchHit[] searchHits, Map indexInfo = ImmutableMap.of("mappingInfo", tableInfo, "question", question, "indexName", indexName); + StringSubstitutor substitutor = new StringSubstitutor(indexInfo, "${indexInfo.", "}"); + String finalPrompt = 
substitutor.replace(contextPrompt); + return finalPrompt; } private void extractNamesTypes(Map mappingSource, Map fieldsToType, String prefix) { @@ -297,22 +305,50 @@ private static void extractSamples(Map sampleSource, Map ActionListener getPPLTransportActionListener(ActionListener listener) { - return ActionListener.wrap(r -> { listener.onResponse(fromActionResponse(r)); }, listener::onFailure); + return ActionListener.wrap(r -> { listener.onResponse(TransportPPLQueryResponse.fromActionResponse(r)); }, listener::onFailure); } - private static TransportPPLQueryResponse fromActionResponse(ActionResponse actionResponse) { - if (actionResponse instanceof TransportPPLQueryResponse) { - return (TransportPPLQueryResponse) actionResponse; + private Map extractFromChatParameters(Map parameters) { + if (parameters.containsKey("input")) { + try { + Map chatParameters = gson.fromJson(parameters.get("input"), Map.class); + parameters.putAll(chatParameters); + } finally { + return parameters; + } } + return parameters; + } + + private String parseOutput(String llmOutput, String indexName) { + String ppl; + Pattern pattern = Pattern.compile("((.|[\\r\\n])+?)"); // For ppl like source=a \n | fields b + Matcher matcher = pattern.matcher(llmOutput); + + if (matcher.find()) { + ppl = matcher.group(1).replaceAll("[\\r\\n]", "").replaceAll("ISNOTNULL", "isnotnull").trim(); + } else { // logic for only ppl returned + int sourceIndex = llmOutput.indexOf("source="); + if (sourceIndex != -1) { + llmOutput = llmOutput.substring(sourceIndex); - try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); OutputStreamStreamOutput osso = new OutputStreamStreamOutput(baos)) { - actionResponse.writeTo(osso); - try (StreamInput input = new InputStreamStreamInput(new ByteArrayInputStream(baos.toByteArray()))) { - return new TransportPPLQueryResponse(input); + // Splitting the string at "|" + String[] lists = llmOutput.split("\\|"); + + // Modifying the first element + if (lists.length > 0) { + 
lists[0] = "source=" + indexName; + } + + // Joining the string back together + ppl = String.join("|", lists); + } else { + throw new IllegalArgumentException("The returned PPL: " + llmOutput + " has wrong format"); } - } catch (IOException e) { - throw new UncheckedIOException("failed to parse ActionResponse into TransportPPLQueryResponse", e); } - + ppl = ppl.replace("`", ""); + ppl = ppl.replaceAll("\\bSPAN\\(", "span("); + return ppl; } + } diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java new file mode 100644 index 00000000..54535841 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -0,0 +1,279 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.when; +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.AdminClient; +import org.opensearch.client.Client; +import org.opensearch.client.IndicesAdminClient; +import org.opensearch.cluster.metadata.MappingMetadata; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.ml.common.output.model.MLResultDataType; +import org.opensearch.ml.common.output.model.ModelTensor; +import 
org.opensearch.ml.common.output.model.ModelTensorOutput; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.transport.MLTaskResponse; +import org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.sql.plugin.transport.PPLQueryAction; +import org.opensearch.sql.plugin.transport.TransportPPLQueryResponse; + +import com.google.common.collect.ImmutableMap; + +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public class PPLToolTests { + @Mock + private Client client; + @Mock + private AdminClient adminClient; + @Mock + private IndicesAdminClient indicesAdminClient; + @Mock + private GetMappingsResponse getMappingsResponse; + @Mock + private MappingMetadata mappingMetadata; + private Map mockedMappings; + private Map indexMappings; + + private SearchHits searchHits; + + private SearchHit hit; + @Mock + private SearchResponse searchResponse; + + private Map sampleMapping; + + @Mock + private MLTaskResponse mlTaskResponse; + @Mock + private ModelTensorOutput modelTensorOutput; + @Mock + private ModelTensors modelTensors; + + private ModelTensor modelTensor; + + private Map pplReturns; + + @Mock + private TransportPPLQueryResponse transportPPLQueryResponse; + + private String mockedIndexName = "demo"; + + private String pplResult = "ppl result"; + + @Before + public void setup() { + MockitoAnnotations.openMocks(this); + createMappings(); + // get mapping + when(mappingMetadata.getSourceAsMap()).thenReturn(indexMappings); + when(getMappingsResponse.getMappings()).thenReturn(mockedMappings); + when(client.admin()).thenReturn(adminClient); + when(adminClient.indices()).thenReturn(indicesAdminClient); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(getMappingsResponse); + return null; + 
}).when(indicesAdminClient).getMappings(any(), any()); + // mockedMappings (index name, mappingmetadata) + + // search result + + when(searchResponse.getHits()).thenReturn(searchHits); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(searchResponse); + return null; + }).when(client).search(any(), any()); + + initMLTensors(); + + when(transportPPLQueryResponse.getResult()).thenReturn(pplResult); + + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(transportPPLQueryResponse); + return null; + }).when(client).execute(eq(PPLQueryAction.INSTANCE), any(), any()); + + PPLTool.Factory.getInstance().init(client); + } + + @Test + public void testTool() { + Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertEquals("ppl result", returnResults.get("executionResult")); + assertEquals("source=demo| head 1", returnResults.get("ppl")); + }, e -> { log.info(e); })); + + } + + @Test + public void testTool_withPPLTag() { + Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + + pplReturns = Collections.singletonMap("response", "source=demo\n|\n\rhead 1"); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, pplReturns); + initMLTensors(); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertEquals("ppl result", returnResults.get("executionResult")); + 
assertEquals("source=demo|head 1", returnResults.get("ppl")); + }, e -> { log.info(e); })); + + } + + @Test + public void testTool_getMappingFailure() { + Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + Exception exception = new Exception("get mapping error"); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(exception); + return null; + }).when(indicesAdminClient).getMappings(any(), any()); + + tool + .run( + ImmutableMap.of("index", "demo", "question", "demo"), + ActionListener.wrap(ppl -> { assertEquals(pplResult, "ppl result"); }, e -> { + assertEquals("get mapping error", e.getMessage()); + }) + ); + } + + @Test + public void testTool_predictModelFailure() { + Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + Exception exception = new Exception("predict model error"); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(exception); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + + tool + .run( + ImmutableMap.of("index", "demo", "question", "demo"), + ActionListener.wrap(ppl -> { assertEquals(pplResult, "ppl result"); }, e -> { + assertEquals("predict model error", e.getMessage()); + }) + ); + } + + @Test + public void testTool_searchFailure() { + Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + Exception exception = new Exception("search error"); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(exception); + return null; + }).when(client).search(any(), any()); 
+ + tool + .run( + ImmutableMap.of("index", "demo", "question", "demo"), + ActionListener.wrap(ppl -> { assertEquals(pplResult, "ppl result"); }, e -> { + assertEquals("search error", e.getMessage()); + }) + ); + } + + @Test + public void testTool_executePPLFailure() { + Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + Exception exception = new Exception("execute ppl error"); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(exception); + return null; + }).when(client).execute(eq(PPLQueryAction.INSTANCE), any(), any()); + + tool + .run( + ImmutableMap.of("index", "demo", "question", "demo"), + ActionListener.wrap(ppl -> { assertEquals(pplResult, "ppl result"); }, e -> { + assertEquals("execute ppl:source=demo| head 1, get error: execute ppl error", e.getMessage()); + }) + ); + } + + private void createMappings() { + indexMappings = new HashMap<>(); + indexMappings + .put( + "properties", + ImmutableMap + .of( + "demoFields", + ImmutableMap.of("type", "text"), + "demoNested", + ImmutableMap + .of( + "properties", + ImmutableMap.of("nest1", ImmutableMap.of("type", "text"), "nest2", ImmutableMap.of("type", "text")) + ) + ) + ); + mockedMappings = new HashMap<>(); + mockedMappings.put(mockedIndexName, mappingMetadata); + + BytesReference bytesArray = new BytesArray("{\"demoFields\":\"111\", \"demoNested\": {\"nest1\": \"222\", \"nest2\": \"333\"}}"); + hit = new SearchHit(1); + hit.sourceRef(bytesArray); + searchHits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); + pplReturns = Collections.singletonMap("response", "source=demo| head 1"); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, pplReturns); + + } + + private void initMLTensors(){ + 
when(modelTensors.getMlModelTensors()).thenReturn(Collections.singletonList(modelTensor)); + when(modelTensorOutput.getMlModelOutputs()).thenReturn(Collections.singletonList(modelTensors)); + when(mlTaskResponse.getOutput()).thenReturn(modelTensorOutput); + + // call model + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(mlTaskResponse); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + } +} From 0ccb01976d4445261823a581f534b80f3c38cd86 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 26 Dec 2023 16:41:45 -0800 Subject: [PATCH 007/119] Add a search AD results tool (#52) (#67) (cherry picked from commit 18445e6c522155c4c405c476f6cd4eef4ef7e27e) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../agent/tools/SearchAnomalyResultsTool.java | 210 ++++++++++++++++++ .../tools/SearchAnomalyResultsToolTests.java | 193 ++++++++++++++++ 2 files changed, 403 insertions(+) create mode 100644 src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java create mode 100644 src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java new file mode 100644 index 00000000..ef1a44dd --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -0,0 +1,210 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import 
org.opensearch.ad.client.AnomalyDetectionNodeClient; +import org.opensearch.client.Client; +import org.opensearch.core.action.ActionListener; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.ExistsQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.RangeQueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Parser; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.sort.SortOrder; + +import lombok.Getter; +import lombok.Setter; + +@ToolAnnotation(SearchAnomalyResultsTool.TYPE) +public class SearchAnomalyResultsTool implements Tool { + public static final String TYPE = "SearchAnomalyResultsTool"; + private static final String DEFAULT_DESCRIPTION = "Use this tool to search anomaly results."; + + @Setter + @Getter + private String name = TYPE; + @Getter + @Setter + private String description = DEFAULT_DESCRIPTION; + + @Getter + private String version; + + private Client client; + + private AnomalyDetectionNodeClient adClient; + + @Setter + private Parser inputParser; + @Setter + private Parser outputParser; + + public SearchAnomalyResultsTool(Client client) { + this.client = client; + this.adClient = new AnomalyDetectionNodeClient(client); + + // probably keep this overridden output parser. 
need to ensure the output matches what's expected + outputParser = new Parser<>() { + @Override + public Object parse(Object o) { + @SuppressWarnings("unchecked") + List mlModelOutputs = (List) o; + return mlModelOutputs.get(0).getMlModelTensors().get(0).getDataAsMap().get("response"); + } + }; + } + + // Response is currently in a simple string format including the list of anomaly results (only detector ID, grade, confidence), + // and total # of results. The output will likely need to be updated, standardized, and include more fields in the + // future to cover a sufficient amount of potential questions the agent will need to handle. + @Override + public void run(Map parameters, ActionListener listener) { + final String detectorId = parameters.getOrDefault("detectorId", null); + final Boolean realTime = parameters.containsKey("realTime") ? Boolean.parseBoolean(parameters.get("realTime")) : null; + final Double anomalyGradeThreshold = parameters.containsKey("anomalyGradeThreshold") + ? Double.parseDouble(parameters.get("anomalyGradeThreshold")) + : null; + final Long dataStartTime = parameters.containsKey("dataStartTime") && StringUtils.isNumeric(parameters.get("dataStartTime")) + ? Long.parseLong(parameters.get("dataStartTime")) + : null; + final Long dataEndTime = parameters.containsKey("dataEndTime") && StringUtils.isNumeric(parameters.get("dataEndTime")) + ? Long.parseLong(parameters.get("dataEndTime")) + : null; + final String sortOrderStr = parameters.getOrDefault("sortOrder", "asc"); + final SortOrder sortOrder = sortOrderStr.equalsIgnoreCase("asc") ? SortOrder.ASC : SortOrder.DESC; + final String sortString = parameters.getOrDefault("sortString", "name.keyword"); + final int size = parameters.containsKey("size") ? Integer.parseInt(parameters.get("size")) : 20; + final int startIndex = parameters.containsKey("startIndex") ? 
Integer.parseInt(parameters.get("startIndex")) : 0; + + List mustList = new ArrayList(); + if (detectorId != null) { + mustList.add(new TermQueryBuilder("detector_id", detectorId)); + } + // We include or exclude the task ID if fetching historical or real-time results, respectively. + // For more details, see https://opensearch.org/docs/latest/observing-your-data/ad/api/#search-detector-result + if (realTime != null) { + BoolQueryBuilder boolQuery = new BoolQueryBuilder(); + ExistsQueryBuilder existsQuery = new ExistsQueryBuilder("task_id"); + if (realTime) { + boolQuery.mustNot(existsQuery); + } else { + boolQuery.must(existsQuery); + } + mustList.add(boolQuery); + } + if (anomalyGradeThreshold != null) { + mustList.add(new RangeQueryBuilder("anomaly_grade").gte(anomalyGradeThreshold)); + } + if (dataStartTime != null || dataEndTime != null) { + RangeQueryBuilder rangeQuery = new RangeQueryBuilder("anomaly_grade"); + if (dataStartTime != null) { + rangeQuery.gte(dataStartTime); + } + if (dataEndTime != null) { + rangeQuery.lte(dataEndTime); + } + mustList.add(rangeQuery); + } + + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must().addAll(mustList); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(boolQueryBuilder) + .size(size) + .from(startIndex) + .sort(sortString, sortOrder); + + SearchRequest searchAnomalyResultsRequest = new SearchRequest().source(searchSourceBuilder); + + ActionListener searchAnomalyResultsListener = ActionListener.wrap(response -> { + StringBuilder sb = new StringBuilder(); + SearchHit[] hits = response.getHits().getHits(); + sb.append("AnomalyResults=["); + for (SearchHit hit : hits) { + sb.append("{"); + sb.append("detectorId=").append(hit.getSourceAsMap().get("detector_id")).append(","); + sb.append("grade=").append(hit.getSourceAsMap().get("anomaly_grade")).append(","); + sb.append("confidence=").append(hit.getSourceAsMap().get("confidence")); + sb.append("}"); + } + 
sb.append("]"); + sb.append("TotalAnomalyResults=").append(response.getHits().getTotalHits().value); + listener.onResponse((T) sb.toString()); + }, e -> { listener.onFailure(e); }); + + adClient.searchAnomalyResults(searchAnomalyResultsRequest, searchAnomalyResultsListener); + } + + @Override + public boolean validate(Map parameters) { + return true; + } + + @Override + public String getType() { + return TYPE; + } + + /** + * Factory for the {@link SearchAnomalyResultsTool} + */ + public static class Factory implements Tool.Factory { + private Client client; + + private AnomalyDetectionNodeClient adClient; + + private static Factory INSTANCE; + + /** + * Create or return the singleton factory instance + */ + public static Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (SearchAnomalyResultsTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new Factory(); + return INSTANCE; + } + } + + /** + * Initialize this factory + * @param client The OpenSearch client + */ + public void init(Client client) { + this.client = client; + this.adClient = new AnomalyDetectionNodeClient(client); + } + + @Override + public SearchAnomalyResultsTool create(Map map) { + return new SearchAnomalyResultsTool(client); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } + +} diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java new file mode 100644 index 00000000..c9d83de2 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java @@ -0,0 +1,193 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.ActionType; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchResponseSections; +import org.opensearch.client.AdminClient; +import org.opensearch.client.ClusterAdminClient; +import org.opensearch.client.IndicesAdminClient; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.aggregations.Aggregations; + +public class SearchAnomalyResultsToolTests { + @Mock + private NodeClient nodeClient; + @Mock + private AdminClient adminClient; + @Mock + private IndicesAdminClient indicesAdminClient; + @Mock + private ClusterAdminClient clusterAdminClient; + + private Map nullParams; + private Map emptyParams; + private Map nonEmptyParams; + + @Before + public void setup() { + MockitoAnnotations.openMocks(this); + SearchAnomalyResultsTool.Factory.getInstance().init(nodeClient); + + nullParams = null; + emptyParams = Collections.emptyMap(); + nonEmptyParams = Map.of("detectorId", "foo"); + } + + @Test + public void testParseParams() throws Exception { + 
Tool tool = SearchAnomalyResultsTool.Factory.getInstance().create(Collections.emptyMap()); + Map validParams = new HashMap(); + validParams.put("detectorId", "foo"); + validParams.put("realTime", "true"); + validParams.put("anomalyGradethreshold", "-1"); + validParams.put("dataStartTime", "1234"); + validParams.put("dataEndTime", "5678"); + validParams.put("sortOrder", "AsC"); + validParams.put("sortString", "foo.bar"); + validParams.put("size", "10"); + validParams.put("startIndex", "0"); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + assertDoesNotThrow(() -> tool.run(validParams, listener)); + } + + @Test + public void testRunWithInvalidAnomalyGradeParam() throws Exception { + Tool tool = SearchAnomalyResultsTool.Factory.getInstance().create(Collections.emptyMap()); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + assertThrows(NumberFormatException.class, () -> tool.run(Map.of("anomalyGradeThreshold", "foo"), listener)); + } + + @Test + public void testRunWithNoResults() throws Exception { + Tool tool = SearchAnomalyResultsTool.Factory.getInstance().create(Collections.emptyMap()); + + SearchHit[] hits = new SearchHit[0]; + + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + + SearchResponse getResultsResponse = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + String expectedResponseStr = String.format(Locale.getDefault(), "AnomalyResults=[]TotalAnomalyResults=%d", hits.length); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getResultsResponse); + return null; + 
}).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(emptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithSingleResult() throws Exception { + final String detectorId = "detector-1-id"; + final double anomalyGrade = 0.5; + final double confidence = 0.9; + Tool tool = SearchAnomalyResultsTool.Factory.getInstance().create(Collections.emptyMap()); + + XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); + content.startObject(); + content.field("detector_id", detectorId); + content.field("anomaly_grade", anomalyGrade); + content.field("confidence", confidence); + content.endObject(); + SearchHit[] hits = new SearchHit[1]; + hits[0] = new SearchHit(0, detectorId, null, null).sourceRef(BytesReference.bytes(content)); + + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + + SearchResponse getResultsResponse = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + String expectedResponseStr = String + .format( + "AnomalyResults=[{detectorId=%s,grade=%2.1f,confidence=%2.1f}]TotalAnomalyResults=%d", + detectorId, + anomalyGrade, + confidence, + hits.length + ); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getResultsResponse); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(emptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, 
times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testValidate() { + Tool tool = SearchAnomalyResultsTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(SearchAnomalyResultsTool.TYPE, tool.getType()); + assertTrue(tool.validate(emptyParams)); + assertTrue(tool.validate(nonEmptyParams)); + assertTrue(tool.validate(nullParams)); + } +} From 8f746aacdc827e9ac1c182f939360f6be7f1019b Mon Sep 17 00:00:00 2001 From: Mingshi Liu <113382730+mingshl@users.noreply.github.com> Date: Tue, 26 Dec 2023 17:29:13 -0800 Subject: [PATCH 008/119] increase AbstractRetrieverToolTests code coverage (#65) Signed-off-by: Mingshi Liu --- .../tools/AbstractRetrieverToolTests.java | 67 +++++++++++++++++-- 1 file changed, 60 insertions(+), 7 deletions(-) diff --git a/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java index 5e0faa9c..5f5803f0 100644 --- a/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java @@ -5,14 +5,10 @@ package org.opensearch.agent.tools; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.*; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import static org.mockito.Mockito.*; +import static org.opensearch.agent.tools.AbstractRetrieverTool.DEFAULT_DESCRIPTION; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -23,6 +19,7 @@ import org.junit.Before; import org.junit.Test; +import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import 
org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; @@ -32,6 +29,7 @@ import org.opensearch.core.action.ActionListener; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.search.SearchModule; import lombok.SneakyThrows; @@ -180,4 +178,59 @@ public void testValidate() { assertFalse(mockedImpl.validate(new HashMap<>())); assertFalse(mockedImpl.validate(null)); } + + @Test + public void testGetAttributes() { + assertEquals(mockedImpl.getVersion(), null); + assertEquals(mockedImpl.getIndex(), TEST_INDEX); + assertEquals(mockedImpl.getDocSize(), TEST_DOC_SIZE); + assertEquals(mockedImpl.getSourceFields(), TEST_SOURCE_FIELDS); + assertEquals(mockedImpl.getQueryBody(TEST_QUERY), TEST_QUERY); + } + + @Test + public void testGetQueryBodySuccess() { + assertEquals(mockedImpl.getQueryBody(TEST_QUERY), TEST_QUERY); + } + + @Test + @SneakyThrows + public void testRunWithRuntimeException() { + Client client = mock(Client.class); + mockedImpl.setClient(client); + ActionListener listener = mock(ActionListener.class); + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener actionListener = invocation.getArgument(1); + actionListener.onFailure(new RuntimeException("Failed to search index")); + return null; + }).when(client).search(any(), any()); + mockedImpl.run(Map.of(AbstractRetrieverTool.INPUT_FIELD, "hello world"), listener); + verify(listener).onFailure(any(RuntimeException.class)); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(argumentCaptor.capture()); + assertEquals("Failed to search index", argumentCaptor.getValue().getMessage()); + } + + @Test + public void testFactory() { + // Create a mock object of the abstract 
Factory class + Client client = mock(Client.class); + AbstractRetrieverTool.Factory factoryMock = new AbstractRetrieverTool.Factory<>() { + public PPLTool create(Map params) { + return null; + } + }; + + factoryMock.init(client, TEST_XCONTENT_REGISTRY_FOR_QUERY); + + assertNotNull(factoryMock.client); + assertNotNull(factoryMock.xContentRegistry); + assertEquals(client, factoryMock.client); + assertEquals(TEST_XCONTENT_REGISTRY_FOR_QUERY, factoryMock.xContentRegistry); + + String defaultDescription = factoryMock.getDefaultDescription(); + assertEquals(DEFAULT_DESCRIPTION, defaultDescription); + } } From 8aadce488ab63d202fd9c997f2c1f4da18d9b6c8 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 14:57:09 +0800 Subject: [PATCH 009/119] [Backport 2.x] Fix backport branch name is not correct (#71) * Fix backport branch name is not correct (#69) Signed-off-by: Hailong Cui (cherry picked from commit 05c8fe087bf8aa0e61ddaaee2971bcb0b6ec9e28) Signed-off-by: github-actions[bot] * include fix in #70 Signed-off-by: zhichao-aws --------- Signed-off-by: Hailong Cui Signed-off-by: github-actions[bot] Signed-off-by: zhichao-aws Co-authored-by: github-actions[bot] Co-authored-by: zhichao-aws --- .github/workflows/backport.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 56fef507..f4ddaa10 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -9,6 +9,17 @@ on: jobs: backport: runs-on: ubuntu-latest + # Only react to merged PRs for security reasons. + # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target. 
+ if: > + github.event.pull_request.merged + && ( + github.event.action == 'closed' + || ( + github.event.action == 'labeled' + && contains(github.event.label.name, 'backport') + ) + ) permissions: contents: write pull-requests: write @@ -26,6 +37,6 @@ jobs: uses: VachaShah/backport@v2.2.0 with: github_token: ${{ steps.github_app_token.outputs.token }} - branch_name: backport/backport-${{ github.event.number }} + head_template: backport/backport-<%= number %>-to-<%= base %> labels_template: "<%= JSON.stringify([...labels, 'autocut']) %>" failure_labels: "failed backport" From ebd330f2b0cf569b54e4d7fc017f744a070c60a9 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 15:28:11 +0800 Subject: [PATCH 010/119] change plugin name add opensearch prefix (#72) (#73) (cherry picked from commit d2e9c7216d8900e8d7ee4b44e9d95591030d6623) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 48bf0ecd..2caa00f1 100644 --- a/build.gradle +++ b/build.gradle @@ -209,7 +209,7 @@ forbiddenApisTest.ignoreFailures = true opensearchplugin { - name 'skills' + name 'opensearch-skills' description 'OpenSearch Skills' classname 'org.opensearch.agent.ToolPlugin' extendedPlugins = ['opensearch-ml'] From cd54a225c8dcd5f223cf517c35338709396961d6 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 15:34:45 +0800 Subject: [PATCH 011/119] Change job-scheduler plugin group to point to correct location (#56) (#60) * Change job-scheduler plguin group to point to correct localtion * Add publish task * add dependency task to publishToStagingRepo * Fix publishAllPublicationsToStagingRepository failure issue * add comment to JS plugin path --------- (cherry 
picked from commit 37bbd3b5a642e621f9a7373c9eddd599511f64fa) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 3 ++- scripts/build.sh | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/build.gradle b/build.gradle index 2caa00f1..a6572106 100644 --- a/build.gradle +++ b/build.gradle @@ -120,7 +120,8 @@ dependencies { // ZipArchive dependencies used for integration tests zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${version}" - zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${version}" + //JS plugin is published to `org/opensearch` instead of `org/opensearch/plugin` under local maven repo: https://mvnrepository.com/artifact/org.opensearch/opensearch-job-scheduler. + zipArchive group: 'org.opensearch', name:'opensearch-job-scheduler', version: "${version}" zipArchive "org.opensearch.plugin:opensearch-anomaly-detection:${version}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" diff --git a/scripts/build.sh b/scripts/build.sh index e0495d4a..3b20300e 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -65,7 +65,7 @@ fi [ -z "$OUTPUT" ] && OUTPUT=artifacts ./gradlew build -x test -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER -./gradlew publishShadowPublicationToMavenLocal -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER -./gradlew publishShadowPublicationToStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew publishToMavenLocal -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew publishPluginZipPublicationToZipStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER mkdir -p 
$OUTPUT/maven/org/opensearch cp -r ./build/local-staging-repo/org/opensearch/. $OUTPUT/maven/org/opensearch From b0db422128843c3393264f5bf2aca9c46a2ef1d1 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 16:50:55 +0800 Subject: [PATCH 012/119] Add visualization tool (#41) (#68) * Visualization Tool * fix build failure due to forbiddenApis * Address review comments * spotlessApply * update default tool name * update number of visualization be dynamic --------- (cherry picked from commit 3774eb9e477d676b332c469dffc69720b9088d2b) Signed-off-by: Hailong Cui Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 8 + .../java/org/opensearch/agent/ToolPlugin.java | 10 +- .../agent/tools/VisualizationsTool.java | 171 ++++++++++++++++++ .../agent/tools/VisualizationsToolTests.java | 161 +++++++++++++++++ .../opensearch/agent/tools/visualization.json | 58 ++++++ .../agent/tools/visualization_not_found.json | 18 ++ 6 files changed, 425 insertions(+), 1 deletion(-) create mode 100644 src/main/java/org/opensearch/agent/tools/VisualizationsTool.java create mode 100644 src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java create mode 100644 src/test/resources/org/opensearch/agent/tools/visualization.json create mode 100644 src/test/resources/org/opensearch/agent/tools/visualization_not_found.json diff --git a/build.gradle b/build.gradle index a6572106..7ad13b82 100644 --- a/build.gradle +++ b/build.gradle @@ -180,6 +180,14 @@ test { systemProperty 'tests.security.manager', 'false' } +jacocoTestReport { + dependsOn test + reports { + html.required = true // human readable + xml.required = true // for coverlay + } +} + spotless { if (JavaVersion.current() >= JavaVersion.VERSION_17) { // Spotless configuration for Java files diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java 
b/src/main/java/org/opensearch/agent/ToolPlugin.java index 8e3d0844..5ac1ce57 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -13,6 +13,7 @@ import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; import org.opensearch.agent.tools.VectorDBTool; +import org.opensearch.agent.tools.VisualizationsTool; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; @@ -56,6 +57,7 @@ public Collection createComponents( this.xContentRegistry = xContentRegistry; PPLTool.Factory.getInstance().init(client); + VisualizationsTool.Factory.getInstance().init(client); NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); VectorDBTool.Factory.getInstance().init(client, xContentRegistry); return Collections.emptyList(); @@ -63,6 +65,12 @@ public Collection createComponents( @Override public List> getToolFactories() { - return List.of(PPLTool.Factory.getInstance(), NeuralSparseSearchTool.Factory.getInstance(), VectorDBTool.Factory.getInstance()); + return List + .of( + PPLTool.Factory.getInstance(), + NeuralSparseSearchTool.Factory.getInstance(), + VectorDBTool.Factory.getInstance(), + VisualizationsTool.Factory.getInstance() + ); } } diff --git a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java new file mode 100644 index 00000000..31f5cf09 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java @@ -0,0 +1,171 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import java.util.Arrays; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; + +import org.opensearch.ExceptionsHelper; +import org.opensearch.action.search.SearchRequest; 
+import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.client.Requests; +import org.opensearch.core.action.ActionListener; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.search.SearchHits; +import org.opensearch.search.builder.SearchSourceBuilder; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Strings; + +import lombok.Builder; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +@Log4j2 +@ToolAnnotation(VisualizationsTool.TYPE) +public class VisualizationsTool implements Tool { + public static final String NAME = "FindVisualizations"; + public static final String TYPE = "VisualizationTool"; + public static final String VERSION = "v1.0"; + + public static final String SAVED_OBJECT_TYPE = "visualization"; + + /** + * default number of visualizations returned + */ + private static final int DEFAULT_SIZE = 3; + private static final String DEFAULT_DESCRIPTION = + "Use this tool to find user created visualizations. 
This tool takes the visualization name as input and returns matching visualizations"; + @Setter + @Getter + private String description = DEFAULT_DESCRIPTION; + + @Getter + @Setter + private String name = NAME; + @Getter + @Setter + private String type = TYPE; + @Getter + private final String version = VERSION; + private final Client client; + @Getter + private final String index; + @Getter + private final int size; + + @Builder + public VisualizationsTool(Client client, String index, int size) { + this.client = client; + this.index = index; + this.size = size; + } + + @Override + public void run(Map parameters, ActionListener listener) { + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + boolQueryBuilder.must().add(QueryBuilders.termQuery("type", SAVED_OBJECT_TYPE)); + boolQueryBuilder.must().add(QueryBuilders.matchQuery(SAVED_OBJECT_TYPE + ".title", parameters.get("input"))); + + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource().query(boolQueryBuilder); + searchSourceBuilder.from(0).size(3); + SearchRequest searchRequest = Requests.searchRequest(index).source(searchSourceBuilder); + + client.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse searchResponse) { + SearchHits hits = searchResponse.getHits(); + StringBuilder visBuilder = new StringBuilder(); + visBuilder.append("Title,Id\n"); + if (hits.getTotalHits().value > 0) { + Arrays.stream(hits.getHits()).forEach(h -> { + String id = trimIdPrefix(h.getId()); + Map visMap = (Map) h.getSourceAsMap().get(SAVED_OBJECT_TYPE); + String title = visMap.get("title"); + visBuilder.append(String.format(Locale.ROOT, "%s,%s\n", title, id)); + }); + + listener.onResponse((T) visBuilder.toString()); + } else { + listener.onResponse((T) "No Visualization found"); + } + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrapCause(e) instanceof IndexNotFoundException) { + listener.onResponse((T) "No 
Visualization found"); + } else { + listener.onFailure(e); + } + } + }); + } + + @VisibleForTesting + String trimIdPrefix(String id) { + id = Optional.ofNullable(id).orElse(""); + if (id.startsWith(SAVED_OBJECT_TYPE)) { + String prefix = String.format(Locale.ROOT, "%s:", SAVED_OBJECT_TYPE); + return id.substring(prefix.length()); + } + return id; + } + + @Override + public boolean validate(Map parameters) { + return parameters.containsKey("input") && !Strings.isNullOrEmpty(parameters.get("input")); + } + + public static class Factory implements Tool.Factory { + private Client client; + + private static VisualizationsTool.Factory INSTANCE; + + public static VisualizationsTool.Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (VisualizationsTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new VisualizationsTool.Factory(); + return INSTANCE; + } + } + + public void init(Client client) { + this.client = client; + } + + @Override + public VisualizationsTool create(Map params) { + String index = params.get("index") == null ? ".kibana" : (String) params.get("index"); + String sizeStr = params.get("size") == null ? 
"3" : (String) params.get("size"); + int size; + try { + size = Integer.parseInt(sizeStr); + } catch (NumberFormatException ignored) { + size = DEFAULT_SIZE; + } + return VisualizationsTool.builder().client(client).index(index).size(size).build(); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } +} diff --git a/src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java b/src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java new file mode 100644 index 00000000..9cd79ff9 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java @@ -0,0 +1,161 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.ArgumentMatchers; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.ml.common.spi.tools.Tool; + +public class VisualizationsToolTests { + @Mock + private Client client; + + private String searchResponse = "{}"; + private String searchResponseNotFound = "{}"; + + @Before + public void setup() throws IOException { + 
MockitoAnnotations.openMocks(this); + VisualizationsTool.Factory.getInstance().init(client); + try (InputStream searchResponseIns = VisualizationsToolTests.class.getResourceAsStream("visualization.json")) { + if (searchResponseIns != null) { + searchResponse = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + } + } + try (InputStream searchResponseIns = VisualizationsToolTests.class.getResourceAsStream("visualization_not_found.json")) { + if (searchResponseIns != null) { + searchResponseNotFound = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + } + } + } + + @Test + public void testToolIndexName() { + VisualizationsTool tool1 = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(tool1.getIndex(), ".kibana"); + + VisualizationsTool tool2 = VisualizationsTool.Factory.getInstance().create(Map.of("index", "test-index")); + assertEquals(tool2.getIndex(), "test-index"); + } + + @Test + public void testNumberOfVisualizationReturned() { + VisualizationsTool tool1 = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(tool1.getSize(), 3); + + VisualizationsTool tool2 = VisualizationsTool.Factory.getInstance().create(Map.of("size", "1")); + assertEquals(tool2.getSize(), 1); + + VisualizationsTool tool3 = VisualizationsTool.Factory.getInstance().create(Map.of("size", "badString")); + assertEquals(tool3.getSize(), 3); + } + + @Test + public void testTrimPrefix() { + VisualizationsTool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(tool.trimIdPrefix(null), ""); + assertEquals(tool.trimIdPrefix("abc"), "abc"); + assertEquals(tool.trimIdPrefix("visualization:abc"), "abc"); + } + + @Test + public void testParameterValidation() { + VisualizationsTool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); + Assert.assertFalse(tool.validate(Collections.emptyMap())); + 
Assert.assertFalse(tool.validate(Map.of("input", ""))); + Assert.assertTrue(tool.validate(Map.of("input", "question"))); + } + + @Test + public void testRunToolWithVisualizationFound() throws Exception { + Tool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); + final CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + ArgumentCaptor> searchResponseListener = ArgumentCaptor.forClass(ActionListener.class); + Mockito.doNothing().when(client).search(ArgumentMatchers.any(SearchRequest.class), searchResponseListener.capture()); + + Map params = Map.of("input", "Sales by gender"); + + tool.run(params, listener); + + SearchResponse response = SearchResponse + .fromXContent( + JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, searchResponse) + ); + searchResponseListener.getValue().onResponse(response); + + future.join(); + assertEquals("Title,Id\n[Ecommerce]Sales by gender,aeb212e0-4c84-11e8-b3d7-01146121b73d\n", future.get()); + } + + @Test + public void testRunToolWithNoVisualizationFound() throws Exception { + Tool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); + final CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + ArgumentCaptor> searchResponseListener = ArgumentCaptor.forClass(ActionListener.class); + Mockito.doNothing().when(client).search(ArgumentMatchers.any(SearchRequest.class), searchResponseListener.capture()); + + Map params = Map.of("input", "Sales by gender"); + + tool.run(params, listener); + + SearchResponse response = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, searchResponseNotFound) + ); + 
searchResponseListener.getValue().onResponse(response); + + future.join(); + assertEquals("No Visualization found", future.get()); + } + + @Test + public void testRunToolWithIndexNotExists() throws Exception { + Tool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); + final CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + ArgumentCaptor> searchResponseListener = ArgumentCaptor.forClass(ActionListener.class); + Mockito.doNothing().when(client).search(ArgumentMatchers.any(SearchRequest.class), searchResponseListener.capture()); + + Map params = Map.of("input", "Sales by gender"); + + tool.run(params, listener); + + IndexNotFoundException notFoundException = new IndexNotFoundException("test-index"); + searchResponseListener.getValue().onFailure(notFoundException); + + future.join(); + assertEquals("No Visualization found", future.get()); + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/visualization.json b/src/test/resources/org/opensearch/agent/tools/visualization.json new file mode 100644 index 00000000..8901706e --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/visualization.json @@ -0,0 +1,58 @@ +{ + "took": 4, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 0.2847877, + "hits": [ + { + "_index": ".kibana_1", + "_id": "visualization:aeb212e0-4c84-11e8-b3d7-01146121b73d", + "_score": 0.2847877, + "_source": { + "visualization": { + "title": "[Ecommerce]Sales by gender", + "visState": "", + "uiStateJSON": "{}", + "description": "", + "version": 1, + "kibanaSavedObjectMeta": { + "searchSourceJSON": "{}" + } + }, + "type": "visualization", + "references": [ + { + "name": "control_0_index_pattern", + "type": "index-pattern", + "id": 
"d3d7af60-4c81-11e8-b3d7-01146121b73d" + }, + { + "name": "control_1_index_pattern", + "type": "index-pattern", + "id": "d3d7af60-4c81-11e8-b3d7-01146121b73d" + }, + { + "name": "control_2_index_pattern", + "type": "index-pattern", + "id": "d3d7af60-4c81-11e8-b3d7-01146121b73d" + } + ], + "migrationVersion": { + "visualization": "7.10.0" + }, + "updated_at": "2023-11-10T02:50:24.881Z" + } + } + ] + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/visualization_not_found.json b/src/test/resources/org/opensearch/agent/tools/visualization_not_found.json new file mode 100644 index 00000000..40a0e9d3 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/visualization_not_found.json @@ -0,0 +1,18 @@ +{ + "took": 1, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 0, + "relation": "eq" + }, + "max_score": null, + "hits": [] + } +} From 0d0c1c110528c99f2564b7ca6b84bee388615d93 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 28 Dec 2023 12:50:27 -0800 Subject: [PATCH 013/119] Migrate alerting tools (#66) (#76) (cherry picked from commit 2f06f6f0b9a29e719d7bb03ec731f9036996d109) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 2 + .../agent/tools/SearchAlertsTool.java | 182 +++++++++++++ .../tools/SearchAnomalyDetectorsTool.java | 7 +- .../agent/tools/SearchMonitorsTool.java | 245 +++++++++++++++++ .../agent/tools/SearchAlertsToolTests.java | 198 ++++++++++++++ .../agent/tools/SearchMonitorsToolTests.java | 256 ++++++++++++++++++ 6 files changed, 889 insertions(+), 1 deletion(-) create mode 100644 src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java create mode 100644 src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java create mode 100644 
src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java create mode 100644 src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java diff --git a/build.gradle b/build.gradle index 7ad13b82..658942a8 100644 --- a/build.gradle +++ b/build.gradle @@ -12,6 +12,7 @@ buildscript { opensearch_version = System.getProperty("opensearch.version", "2.12.0-SNAPSHOT") isSnapshot = "true" == System.getProperty("build.snapshot", "true") buildVersionQualifier = System.getProperty("build.version_qualifier", "") + kotlin_version = System.getProperty("kotlin.version", "1.8.21") } repositories { @@ -117,6 +118,7 @@ dependencies { implementation fileTree(dir: adJarDirectory, include: ["opensearch-anomaly-detection-${version}.jar"]) implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${version}.jar", "ppl-${version}.jar", "protocol-${version}.jar"]) compileOnly "org.opensearch:common-utils:${version}" + compileOnly "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" // ZipArchive dependencies used for integration tests zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${version}" diff --git a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java new file mode 100644 index 00000000..3ade5b33 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java @@ -0,0 +1,182 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.client.Client; +import org.opensearch.client.node.NodeClient; +import org.opensearch.commons.alerting.AlertingPluginInterface; +import org.opensearch.commons.alerting.action.GetAlertsRequest; +import 
org.opensearch.commons.alerting.action.GetAlertsResponse; +import org.opensearch.commons.alerting.model.Alert; +import org.opensearch.commons.alerting.model.Table; +import org.opensearch.core.action.ActionListener; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Parser; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +@Log4j2 +@ToolAnnotation(SearchAlertsTool.TYPE) +public class SearchAlertsTool implements Tool { + public static final String TYPE = "SearchAlertsTool"; + private static final String DEFAULT_DESCRIPTION = "Use this tool to search alerts."; + + @Setter + @Getter + private String name = TYPE; + @Getter + @Setter + private String description = DEFAULT_DESCRIPTION; + @Getter + private String type; + @Getter + private String version; + + private Client client; + @Setter + private Parser inputParser; + @Setter + private Parser outputParser; + + public SearchAlertsTool(Client client) { + this.client = client; + + // probably keep this overridden output parser. need to ensure the output matches what's expected + outputParser = new Parser<>() { + @Override + public Object parse(Object o) { + @SuppressWarnings("unchecked") + List mlModelOutputs = (List) o; + return mlModelOutputs.get(0).getMlModelTensors().get(0).getDataAsMap().get("response"); + } + }; + } + + @Override + public void run(Map parameters, ActionListener listener) { + final String tableSortOrder = parameters.getOrDefault("sortOrder", "asc"); + final String tableSortString = parameters.getOrDefault("sortString", "monitor_name.keyword"); + final int tableSize = parameters.containsKey("size") && StringUtils.isNumeric(parameters.get("size")) + ? 
Integer.parseInt(parameters.get("size")) + : 20; + final int startIndex = parameters.containsKey("startIndex") && StringUtils.isNumeric(parameters.get("startIndex")) + ? Integer.parseInt(parameters.get("startIndex")) + : 0; + final String searchString = parameters.getOrDefault("searchString", null); + + // not exposing "missing" from the table, using default of null + final Table table = new Table(tableSortOrder, tableSortString, null, tableSize, startIndex, searchString); + + final String severityLevel = parameters.getOrDefault("severityLevel", "ALL"); + final String alertState = parameters.getOrDefault("alertState", "ALL"); + final String monitorId = parameters.getOrDefault("monitorId", null); + final String alertIndex = parameters.getOrDefault("alertIndex", null); + @SuppressWarnings("unchecked") + final List monitorIds = parameters.containsKey("monitorIds") + ? gson.fromJson(parameters.get("monitorIds"), List.class) + : null; + @SuppressWarnings("unchecked") + final List workflowIds = parameters.containsKey("workflowIds") + ? gson.fromJson(parameters.get("workflowIds"), List.class) + : null; + @SuppressWarnings("unchecked") + final List alertIds = parameters.containsKey("alertIds") ? 
gson.fromJson(parameters.get("alertIds"), List.class) : null; + + GetAlertsRequest getAlertsRequest = new GetAlertsRequest( + table, + severityLevel, + alertState, + monitorId, + alertIndex, + monitorIds, + workflowIds, + alertIds + ); + + // create response listener + // stringify the response, may change to a standard format in the future + ActionListener getAlertsListener = ActionListener.wrap(response -> { + StringBuilder sb = new StringBuilder(); + sb.append("Alerts=["); + for (Alert alert : response.getAlerts()) { + sb.append(alert.toString()); + } + sb.append("]"); + sb.append("TotalAlerts=").append(response.getTotalAlerts()); + listener.onResponse((T) sb.toString()); + }, e -> { + log.error("Failed to search alerts.", e); + listener.onFailure(e); + }); + + // execute the search + AlertingPluginInterface.INSTANCE.getAlerts((NodeClient) client, getAlertsRequest, getAlertsListener); + } + + @Override + public boolean validate(Map parameters) { + return true; + } + + @Override + public String getType() { + return TYPE; + } + + /** + * Factory for the {@link SearchAlertsTool} + */ + public static class Factory implements Tool.Factory { + private Client client; + + private static Factory INSTANCE; + + /** + * Create or return the singleton factory instance + */ + public static Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (SearchAlertsTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new Factory(); + return INSTANCE; + } + } + + /** + * Initialize this factory + * @param client The OpenSearch client + */ + public void init(Client client) { + this.client = client; + } + + @Override + public SearchAlertsTool create(Map map) { + return new SearchAlertsTool(client); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } + +} diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java 
b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index 357668c9..de397521 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -30,7 +30,9 @@ import lombok.Getter; import lombok.Setter; +import lombok.extern.log4j.Log4j2; +@Log4j2 @ToolAnnotation(SearchAnomalyDetectorsTool.TYPE) public class SearchAnomalyDetectorsTool implements Tool { public static final String TYPE = "SearchAnomalyDetectorsTool"; @@ -140,7 +142,10 @@ public void run(Map parameters, ActionListener listener) sb.append("]"); sb.append("TotalAnomalyDetectors=").append(response.getHits().getTotalHits().value); listener.onResponse((T) sb.toString()); - }, e -> { listener.onFailure(e); }); + }, e -> { + log.error("Failed to search anomaly detectors.", e); + listener.onFailure(e); + }); adClient.searchAnomalyDetectors(searchDetectorRequest, searchDetectorListener); } diff --git a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java new file mode 100644 index 00000000..21975080 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java @@ -0,0 +1,245 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.client.node.NodeClient; +import org.opensearch.commons.alerting.AlertingPluginInterface; +import org.opensearch.commons.alerting.action.GetMonitorRequest; +import org.opensearch.commons.alerting.action.GetMonitorResponse; +import 
org.opensearch.commons.alerting.action.SearchMonitorRequest; +import org.opensearch.commons.alerting.model.Monitor; +import org.opensearch.core.action.ActionListener; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.ExistsQueryBuilder; +import org.opensearch.index.query.NestedQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.query.WildcardQueryBuilder; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Parser; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.rest.RestRequest; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.sort.SortOrder; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +@Log4j2 +@ToolAnnotation(SearchMonitorsTool.TYPE) +public class SearchMonitorsTool implements Tool { + public static final String TYPE = "SearchMonitorsTool"; + private static final String DEFAULT_DESCRIPTION = "Use this tool to search alerting monitors."; + + @Setter + @Getter + private String name = TYPE; + @Getter + @Setter + private String description = DEFAULT_DESCRIPTION; + @Getter + private String type; + @Getter + private String version; + + private Client client; + @Setter + private Parser inputParser; + @Setter + private Parser outputParser; + + public SearchMonitorsTool(Client client) { + this.client = client; + + // probably keep this overridden output parser. 
need to ensure the output matches what's expected + outputParser = new Parser<>() { + @Override + public Object parse(Object o) { + @SuppressWarnings("unchecked") + List mlModelOutputs = (List) o; + return mlModelOutputs.get(0).getMlModelTensors().get(0).getDataAsMap().get("response"); + } + }; + } + + // Response is currently in a simple string format including the list of monitors (only name and ID attached), and + // number of total monitors. The output will likely need to be updated, standardized, and include more fields in the + // future to cover a sufficient amount of potential questions the agent will need to handle. + @Override + public void run(Map parameters, ActionListener listener) { + final String monitorId = parameters.getOrDefault("monitorId", null); + final String monitorName = parameters.getOrDefault("monitorName", null); + final String monitorNamePattern = parameters.getOrDefault("monitorNamePattern", null); + final Boolean enabled = parameters.containsKey("enabled") ? Boolean.parseBoolean(parameters.get("enabled")) : null; + final Boolean hasTriggers = parameters.containsKey("hasTriggers") ? Boolean.parseBoolean(parameters.get("hasTriggers")) : null; + final String indices = parameters.getOrDefault("indices", null); + final String sortOrderStr = parameters.getOrDefault("sortOrder", "asc"); + final SortOrder sortOrder = "asc".equalsIgnoreCase(sortOrderStr) ? SortOrder.ASC : SortOrder.DESC; + final String sortString = parameters.getOrDefault("sortString", "monitor.name.keyword"); + final int size = parameters.containsKey("size") && StringUtils.isNumeric(parameters.get("size")) + ? Integer.parseInt(parameters.get("size")) + : 20; + final int startIndex = parameters.containsKey("startIndex") && StringUtils.isNumeric(parameters.get("startIndex")) + ? Integer.parseInt(parameters.get("startIndex")) + : 0; + + // If a monitor ID is specified, all other params will be ignored. 
Simply return the monitor details based on that ID + // via the get monitor transport action + if (monitorId != null) { + GetMonitorRequest getMonitorRequest = new GetMonitorRequest(monitorId, 1L, RestRequest.Method.GET, null); + ActionListener getMonitorListener = ActionListener.wrap(response -> { + StringBuilder sb = new StringBuilder(); + Monitor monitor = response.getMonitor(); + if (monitor != null) { + sb.append("Monitors=["); + sb.append("{"); + sb.append("id=").append(monitor.getId()).append(","); + sb.append("name=").append(monitor.getName()); + sb.append("}]"); + sb.append("TotalMonitors=1"); + } else { + sb.append("Monitors=[]TotalMonitors=0"); + } + listener.onResponse((T) sb.toString()); + }, e -> { + log.error("Failed to search monitors.", e); + listener.onFailure(e); + }); + AlertingPluginInterface.INSTANCE.getMonitor((NodeClient) client, getMonitorRequest, getMonitorListener); + } else { + List mustList = new ArrayList(); + if (monitorName != null) { + mustList.add(new TermQueryBuilder("monitor.name.keyword", monitorName)); + } + if (monitorNamePattern != null) { + mustList.add(new WildcardQueryBuilder("monitor.name.keyword", monitorNamePattern)); + } + if (enabled != null) { + mustList.add(new TermQueryBuilder("monitor.enabled", enabled)); + } + if (hasTriggers != null) { + NestedQueryBuilder nestedTriggerQuery = new NestedQueryBuilder( + "monitor.triggers", + new ExistsQueryBuilder("monitor.triggers"), + ScoreMode.None + ); + + BoolQueryBuilder triggerQuery = new BoolQueryBuilder(); + if (hasTriggers) { + triggerQuery.must(nestedTriggerQuery); + } else { + triggerQuery.mustNot(nestedTriggerQuery); + } + mustList.add(triggerQuery); + } + if (indices != null) { + mustList + .add( + new NestedQueryBuilder( + "monitor.inputs", + new WildcardQueryBuilder("monitor.inputs.search.indices", indices), + ScoreMode.None + ) + ); + } + + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must().addAll(mustList); + 
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(boolQueryBuilder) + .size(size) + .from(startIndex) + .sort(sortString, sortOrder); + + SearchMonitorRequest searchMonitorRequest = new SearchMonitorRequest(new SearchRequest().source(searchSourceBuilder)); + + ActionListener searchMonitorListener = ActionListener.wrap(response -> { + StringBuilder sb = new StringBuilder(); + SearchHit[] hits = response.getHits().getHits(); + sb.append("Monitors=["); + for (SearchHit hit : hits) { + sb.append("{"); + sb.append("id=").append(hit.getId()).append(","); + sb.append("name=").append(hit.getSourceAsMap().get("name")); + sb.append("}"); + } + sb.append("]"); + sb.append("TotalMonitors=").append(response.getHits().getTotalHits().value); + listener.onResponse((T) sb.toString()); + }, e -> { + log.error("Failed to search monitors.", e); + listener.onFailure(e); + }); + AlertingPluginInterface.INSTANCE.searchMonitors((NodeClient) client, searchMonitorRequest, searchMonitorListener); + } + } + + @Override + public boolean validate(Map parameters) { + return true; + } + + @Override + public String getType() { + return TYPE; + } + + /** + * Factory for the {@link SearchMonitorsTool} + */ + public static class Factory implements Tool.Factory { + private Client client; + + private static Factory INSTANCE; + + /** + * Create or return the singleton factory instance + */ + public static Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (SearchMonitorsTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new Factory(); + return INSTANCE; + } + } + + /** + * Initialize this factory + * @param client The OpenSearch client + */ + public void init(Client client) { + this.client = client; + } + + @Override + public SearchMonitorsTool create(Map map) { + return new SearchMonitorsTool(client); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } + +} diff --git 
a/src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java new file mode 100644 index 00000000..ca1f8b99 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java @@ -0,0 +1,198 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import java.time.Instant; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.ActionType; +import org.opensearch.client.AdminClient; +import org.opensearch.client.ClusterAdminClient; +import org.opensearch.client.IndicesAdminClient; +import org.opensearch.client.node.NodeClient; +import org.opensearch.commons.alerting.action.GetAlertsResponse; +import org.opensearch.commons.alerting.model.Alert; +import org.opensearch.core.action.ActionListener; +import org.opensearch.ml.common.spi.tools.Tool; + +public class SearchAlertsToolTests { + @Mock + private NodeClient nodeClient; + @Mock + private AdminClient adminClient; + @Mock + private IndicesAdminClient indicesAdminClient; + @Mock + private ClusterAdminClient clusterAdminClient; + + private Map nullParams; + private Map emptyParams; + private Map nonEmptyParams; + + @Before + public void setup() { + MockitoAnnotations.openMocks(this); + SearchAlertsTool.Factory.getInstance().init(nodeClient); + + nullParams = 
null; + emptyParams = Collections.emptyMap(); + nonEmptyParams = Map.of("searchString", "foo"); + } + + @Test + public void testRunWithNoAlerts() throws Exception { + Tool tool = SearchAlertsTool.Factory.getInstance().create(Collections.emptyMap()); + GetAlertsResponse getAlertsResponse = new GetAlertsResponse(Collections.emptyList(), 0); + String expectedResponseStr = "Alerts=[]TotalAlerts=0"; + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getAlertsResponse); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(nonEmptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithAlerts() throws Exception { + Tool tool = SearchAlertsTool.Factory.getInstance().create(Collections.emptyMap()); + Alert alert1 = new Alert( + "alert-id-1", + 1234, + 1, + "monitor-id", + "workflow-id", + "workflow-name", + "monitor-name", + 1234, + null, + "trigger-id", + "trigger-name", + Collections.emptyList(), + Collections.emptyList(), + Alert.State.ACKNOWLEDGED, + Instant.now(), + null, + null, + null, + null, + Collections.emptyList(), + "test-severity", + Collections.emptyList(), + null, + null, + Collections.emptyList() + ); + Alert alert2 = new Alert( + "alert-id-2", + 1234, + 1, + "monitor-id", + "workflow-id", + "workflow-name", + "monitor-name", + 1234, + null, + "trigger-id", + "trigger-name", + Collections.emptyList(), + Collections.emptyList(), + Alert.State.ACKNOWLEDGED, + Instant.now(), + null, + null, + null, + null, + Collections.emptyList(), + "test-severity", + Collections.emptyList(), + null, + null, + Collections.emptyList() + ); + List mockAlerts = 
List.of(alert1, alert2); + + GetAlertsResponse getAlertsResponse = new GetAlertsResponse(mockAlerts, mockAlerts.size()); + String expectedResponseStr = new StringBuilder() + .append("Alerts=[") + .append(alert1.toString()) + .append(alert2.toString()) + .append("]TotalAlerts=2") + .toString(); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getAlertsResponse); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(nonEmptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testParseParams() throws Exception { + Tool tool = SearchAlertsTool.Factory.getInstance().create(Collections.emptyMap()); + Map validParams = new HashMap(); + validParams.put("sortOrder", "asc"); + validParams.put("sortString", "foo.bar"); + validParams.put("size", "10"); + validParams.put("startIndex", "0"); + validParams.put("searchString", "foo"); + validParams.put("severityLevel", "ALL"); + validParams.put("alertState", "ALL"); + validParams.put("monitorId", "foo"); + validParams.put("alertIndex", "foo"); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + assertDoesNotThrow(() -> tool.run(validParams, listener)); + assertDoesNotThrow(() -> tool.run(Map.of("monitorIds", "[]"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("monitorIds", "[foo]"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("workflowIds", "[]"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("workflowIds", "[foo]"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("alertIds", "[]"), listener)); + assertDoesNotThrow(() -> 
tool.run(Map.of("alertIds", "[foo]"), listener)); + } + + @Test + public void testValidate() { + Tool tool = SearchAlertsTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(SearchAlertsTool.TYPE, tool.getType()); + assertTrue(tool.validate(emptyParams)); + assertTrue(tool.validate(nonEmptyParams)); + assertTrue(tool.validate(nullParams)); + } +} diff --git a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java new file mode 100644 index 00000000..37bc960f --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java @@ -0,0 +1,256 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +import java.time.Instant; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.ActionType; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchResponseSections; +import org.opensearch.client.AdminClient; +import org.opensearch.client.ClusterAdminClient; +import org.opensearch.client.IndicesAdminClient; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.xcontent.XContentType; +import 
org.opensearch.commons.alerting.action.GetMonitorResponse; +import org.opensearch.commons.alerting.model.CronSchedule; +import org.opensearch.commons.alerting.model.DataSources; +import org.opensearch.commons.alerting.model.Monitor; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.aggregations.Aggregations; + +public class SearchMonitorsToolTests { + @Mock + private NodeClient nodeClient; + @Mock + private AdminClient adminClient; + @Mock + private IndicesAdminClient indicesAdminClient; + @Mock + private ClusterAdminClient clusterAdminClient; + + private Map nullParams; + private Map emptyParams; + private Map nonEmptyParams; + private Map monitorIdParams; + + private Monitor testMonitor; + + @Before + public void setup() { + MockitoAnnotations.openMocks(this); + SearchMonitorsTool.Factory.getInstance().init(nodeClient); + + nullParams = null; + emptyParams = Collections.emptyMap(); + nonEmptyParams = Map.of("monitorName", "foo"); + monitorIdParams = Map.of("monitorId", "foo"); + testMonitor = new Monitor( + "monitor-1-id", + 0L, + "monitor-1", + true, + new CronSchedule("31 * * * *", ZoneId.of("Asia/Kolkata"), null), + Instant.now(), + Instant.now(), + Monitor.MonitorType.QUERY_LEVEL_MONITOR, + new User("test-user", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()), + 0, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyMap(), + new DataSources(), + "" + ); + } + + @Test + public void testRunWithNoMonitors() throws Exception { + Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); + + SearchHit[] hits = new SearchHit[0]; + + TotalHits totalHits = new 
TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + + SearchResponse getMonitorsResponse = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + String expectedResponseStr = String.format("Monitors=[]TotalMonitors=%d", hits.length); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getMonitorsResponse); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(emptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithMonitorId() throws Exception { + Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); + + GetMonitorResponse getMonitorResponse = new GetMonitorResponse( + testMonitor.getId(), + 1L, + 2L, + 0L, + testMonitor, + Collections.emptyList() + ); + String expectedResponseStr = String + .format("Monitors=[{id=%s,name=%s}]TotalMonitors=%d", testMonitor.getId(), testMonitor.getName(), 1); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getMonitorResponse); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(monitorIdParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + 
public void testRunWithMonitorIdNotFound() throws Exception { + Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); + + GetMonitorResponse responseWithNullMonitor = new GetMonitorResponse(testMonitor.getId(), 1L, 2L, 0L, null, Collections.emptyList()); + String expectedResponseStr = String.format("Monitors=[]TotalMonitors=0"); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(responseWithNullMonitor); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(monitorIdParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithSingleMonitor() throws Exception { + Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); + + XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); + content.startObject(); + content.field("type", "monitor"); + content.field("name", testMonitor.getName()); + content.endObject(); + SearchHit[] hits = new SearchHit[1]; + hits[0] = new SearchHit(0, testMonitor.getId(), null, null).sourceRef(BytesReference.bytes(content)); + + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + + SearchResponse getMonitorsResponse = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + String expectedResponseStr = String + .format("Monitors=[{id=%s,name=%s}]TotalMonitors=%d", testMonitor.getId(), testMonitor.getName(), hits.length); + + @SuppressWarnings("unchecked") + ActionListener listener = 
Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getMonitorsResponse); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(emptyParams, listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testParseParams() throws Exception { + Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); + Map validParams = new HashMap(); + validParams.put("monitorName", "foo"); + validParams.put("enabled", "true"); + validParams.put("hasTriggers", "true"); + validParams.put("indices", "bar"); + validParams.put("sortOrder", "ASC"); + validParams.put("sortString", "baz"); + validParams.put("size", "10"); + validParams.put("startIndex", "0"); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + assertDoesNotThrow(() -> tool.run(validParams, listener)); + assertDoesNotThrow(() -> tool.run(Map.of("hasTriggers", "false"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("monitorNamePattern", "foo*"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("detectorId", "foo"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("sortOrder", "AsC"), listener)); + } + + @Test + public void testValidate() { + Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(SearchMonitorsTool.TYPE, tool.getType()); + assertTrue(tool.validate(emptyParams)); + assertTrue(tool.validate(nonEmptyParams)); + assertTrue(tool.validate(monitorIdParams)); + assertTrue(tool.validate(nullParams)); + } +} From 813c55e0ad1278f0fbe2724ec42eef1de7bf53e5 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" 
<98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 28 Dec 2023 16:37:41 -0800 Subject: [PATCH 014/119] Add RAGTool (#78) (#79) * increase AbstractRetrieverToolTests code coverage * add RAGTool * Change exception handling from input field in RAGTool --------- (cherry picked from commit 24d5cf985dec87d2f0797759c2765a18d9d6cbd9) Signed-off-by: Mingshi Liu Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/RAGTool.java | 279 +++++++++++++++ .../opensearch/agent/tools/RAGToolTests.java | 330 ++++++++++++++++++ 2 files changed, 609 insertions(+) create mode 100644 src/main/java/org/opensearch/agent/tools/RAGTool.java create mode 100644 src/test/java/org/opensearch/agent/tools/RAGToolTests.java diff --git a/src/main/java/org/opensearch/agent/tools/RAGTool.java b/src/main/java/org/opensearch/agent/tools/RAGTool.java new file mode 100644 index 00000000..7c9c26c5 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/RAGTool.java @@ -0,0 +1,279 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.apache.commons.lang3.StringEscapeUtils.escapeJson; +import static org.opensearch.agent.tools.VectorDBTool.DEFAULT_K; +import static org.opensearch.ml.common.utils.StringUtils.gson; +import static org.opensearch.ml.common.utils.StringUtils.toJson; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.opensearch.action.ActionRequest; +import org.opensearch.client.Client; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.ml.common.FunctionName; +import org.opensearch.ml.common.dataset.remote.RemoteInferenceInputDataSet; +import org.opensearch.ml.common.input.MLInput; +import org.opensearch.ml.common.output.model.ModelTensor; +import org.opensearch.ml.common.output.model.ModelTensorOutput; 
+import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Parser; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; +import org.opensearch.ml.common.transport.prediction.MLPredictionTaskRequest; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; + +import lombok.Builder; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * This tool supports retrieving helpful information to optimize the output of the large language model to answer questions.. + */ +@Log4j2 +@Setter +@Getter +@ToolAnnotation(RAGTool.TYPE) +public class RAGTool extends AbstractRetrieverTool { + public static final String TYPE = "RAGTool"; + public static String DEFAULT_DESCRIPTION = + "Use this tool to retrieve helpful information to optimize the output of the large language model to answer questions."; + public static final String INFERENCE_MODEL_ID_FIELD = "inference_model_id"; + public static final String EMBEDDING_MODEL_ID_FIELD = "embedding_model_id"; + public static final String EMBEDDING_FIELD = "embedding_field"; + public static final String OUTPUT_FIELD = "output_field"; + private String name = TYPE; + private String description = DEFAULT_DESCRIPTION; + private Client client; + private String inferenceModelId; + private NamedXContentRegistry xContentRegistry; + private String index; + private String embeddingField; + private String[] sourceFields; + private String embeddingModelId; + private Integer docSize; + private Integer k; + @Setter + private Parser inputParser; + @Setter + private Parser outputParser; + + @Builder + public RAGTool( + Client client, + NamedXContentRegistry xContentRegistry, + String index, + String embeddingField, + String[] sourceFields, + Integer k, + Integer docSize, + String embeddingModelId, + String inferenceModelId + ) { + super(client, xContentRegistry, index, sourceFields, 
docSize); + this.client = client; + this.xContentRegistry = xContentRegistry; + this.index = index; + this.embeddingField = embeddingField; + this.sourceFields = sourceFields; + this.embeddingModelId = embeddingModelId; + this.docSize = docSize == null ? DEFAULT_DOC_SIZE : docSize; + this.k = k == null ? DEFAULT_K : k; + this.inferenceModelId = inferenceModelId; + + outputParser = new Parser() { + @Override + public Object parse(Object o) { + List mlModelOutputs = (List) o; + return mlModelOutputs.get(0).getMlModelTensors().get(0).getDataAsMap().get("response"); + } + }; + } + + // getQueryBody is not used in RAGTool + @Override + protected String getQueryBody(String queryText) { + return queryText; + } + + @Override + public void run(Map parameters, ActionListener listener) { + String input = null; + + if (!this.validate(parameters)) { + throw new IllegalArgumentException("[" + INPUT_FIELD + "] is null or empty, can not process it."); + } + + try { + String question = parameters.get(INPUT_FIELD); + input = gson.fromJson(question, String.class); + } catch (Exception e) { + log.error("Failed to read question from " + INPUT_FIELD, e); + listener.onFailure(new IllegalArgumentException("Failed to read question from " + INPUT_FIELD)); + return; + } + + Map params = new HashMap<>(); + VectorDBTool.Factory.getInstance().init(client, xContentRegistry); + params.put(VectorDBTool.INDEX_FIELD, this.index); + params.put(VectorDBTool.EMBEDDING_FIELD, this.embeddingField); + params.put(VectorDBTool.SOURCE_FIELD, gson.toJson(this.sourceFields)); + params.put(VectorDBTool.MODEL_ID_FIELD, this.embeddingModelId); + params.put(VectorDBTool.DOC_SIZE_FIELD, String.valueOf(this.docSize)); + params.put(VectorDBTool.K_FIELD, String.valueOf(this.k)); + VectorDBTool vectorDBTool = VectorDBTool.Factory.getInstance().create(params); + + String embeddingInput = input; + ActionListener actionListener = ActionListener.wrap(r -> { + T vectorDBToolOutput; + + if (r.equals("Can not get any match 
from search result.")) { + vectorDBToolOutput = (T) ""; + } else { + Gson gson = new Gson(); + String[] hits = r.toString().split("\n"); + + StringBuilder resultBuilder = new StringBuilder(); + for (String hit : hits) { + JsonObject jsonObject = gson.fromJson(hit, JsonObject.class); + String id = jsonObject.get("_id").getAsString(); + JsonObject source = jsonObject.getAsJsonObject("_source"); + + resultBuilder.append("_id: ").append(id).append("\n"); + resultBuilder.append("_source: ").append(source.toString()).append("\n"); + } + + vectorDBToolOutput = (T) gson.toJson(resultBuilder.toString()); + } + + Map tmpParameters = new HashMap<>(); + tmpParameters.putAll(parameters); + + if (vectorDBToolOutput instanceof List + && !((List) vectorDBToolOutput).isEmpty() + && ((List) vectorDBToolOutput).get(0) instanceof ModelTensors) { + ModelTensors tensors = (ModelTensors) ((List) vectorDBToolOutput).get(0); + Object response = tensors.getMlModelTensors().get(0).getDataAsMap().get("response"); + tmpParameters.put(OUTPUT_FIELD, response + ""); + } else if (vectorDBToolOutput instanceof ModelTensor) { + tmpParameters.put(OUTPUT_FIELD, escapeJson(toJson(((ModelTensor) vectorDBToolOutput).getDataAsMap()))); + } else { + if (vectorDBToolOutput instanceof String) { + tmpParameters.put(OUTPUT_FIELD, (String) vectorDBToolOutput); + } else { + tmpParameters.put(OUTPUT_FIELD, escapeJson(toJson(vectorDBToolOutput.toString()))); + } + } + + RemoteInferenceInputDataSet inputDataSet = RemoteInferenceInputDataSet.builder().parameters(tmpParameters).build(); + MLInput mlInput = MLInput.builder().algorithm(FunctionName.REMOTE).inputDataset(inputDataSet).build(); + ActionRequest request = new MLPredictionTaskRequest(inferenceModelId, mlInput, null); + + client.execute(MLPredictionTaskAction.INSTANCE, request, ActionListener.wrap(resp -> { + ModelTensorOutput modelTensorOutput = (ModelTensorOutput) resp.getOutput(); + modelTensorOutput.getMlModelOutputs(); + if (outputParser == null) { + 
listener.onResponse((T) modelTensorOutput.getMlModelOutputs()); + } else { + listener.onResponse((T) outputParser.parse(modelTensorOutput.getMlModelOutputs())); + } + }, e -> { + log.error("Failed to run model " + inferenceModelId, e); + listener.onFailure(e); + })); + }, e -> { + log.error("Failed to search index.", e); + listener.onFailure(e); + }); + vectorDBTool.run(Map.of(VectorDBTool.INPUT_FIELD, embeddingInput), actionListener); + + } + + @Override + public String getType() { + return TYPE; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public void setName(String s) { + this.name = s; + } + + @Override + public boolean validate(Map parameters) { + if (parameters == null || parameters.size() == 0) { + return false; + } + String question = parameters.get(INPUT_FIELD); + return question != null && !question.trim().isEmpty(); + } + + /** + * Factory class to create RAGTool + */ + public static class Factory extends AbstractRetrieverTool.Factory { + private Client client; + private NamedXContentRegistry xContentRegistry; + + private static Factory INSTANCE; + + public static Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (RAGTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new Factory(); + return INSTANCE; + } + } + + public void init(Client client, NamedXContentRegistry xContentRegistry) { + this.client = client; + this.xContentRegistry = xContentRegistry; + } + + @Override + public RAGTool create(Map params) { + String embeddingModelId = (String) params.get(EMBEDDING_MODEL_ID_FIELD); + String index = (String) params.get(INDEX_FIELD); + String embeddingField = (String) params.get(EMBEDDING_FIELD); + String[] sourceFields = gson.fromJson((String) params.get(SOURCE_FIELD), String[].class); + String inferenceModelId = (String) params.get(INFERENCE_MODEL_ID_FIELD); + Integer docSize = params.containsKey(DOC_SIZE_FIELD) ? 
Integer.parseInt((String) params.get(DOC_SIZE_FIELD)) : 2; + return RAGTool + .builder() + .client(client) + .xContentRegistry(xContentRegistry) + .index(index) + .embeddingField(embeddingField) + .sourceFields(sourceFields) + .embeddingModelId(embeddingModelId) + .docSize(docSize) + .inferenceModelId(inferenceModelId) + .build(); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } +} diff --git a/src/test/java/org/opensearch/agent/tools/RAGToolTests.java b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java new file mode 100644 index 00000000..79bfcebf --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java @@ -0,0 +1,330 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.opensearch.agent.tools.AbstractRetrieverTool.*; +import static org.opensearch.agent.tools.AbstractRetrieverToolTests.*; +import static org.opensearch.agent.tools.VectorDBTool.DEFAULT_K; +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.*; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.ParseField; +import 
org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.ml.common.output.model.ModelTensor; +import org.opensearch.ml.common.output.model.ModelTensorOutput; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Parser; +import org.opensearch.ml.common.transport.MLTaskResponse; +import org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; +import org.opensearch.ml.repackage.com.google.common.collect.ImmutableMap; + +import lombok.SneakyThrows; + +public class RAGToolTests { + public static final String TEST_QUERY_TEXT = "hello?"; + public static final String TEST_EMBEDDING_FIELD = "test_embedding"; + public static final String TEST_EMBEDDING_MODEL_ID = "1234"; + public static final String TEST_INFERENCE_MODEL_ID = "1234"; + + public static final String TEST_NEURAL_QUERY = "{\"query\":{\"neural\":{\"" + + TEST_EMBEDDING_FIELD + + "\":{\"query_text\":\"" + + TEST_QUERY_TEXT + + "\",\"model_id\":\"" + + TEST_EMBEDDING_MODEL_ID + + "\",\"k\":" + + DEFAULT_K + + "}}}" + + " }";; + private RAGTool ragTool; + private String mockedSearchResponseString; + private String mockedEmptySearchResponseString; + @Mock + private Parser mockOutputParser; + @Mock + private Client client; + @Mock + private ActionListener listener; + private Map params; + + @Before + @SneakyThrows + public void setup() { + try (InputStream searchResponseIns = AbstractRetrieverTool.class.getResourceAsStream("retrieval_tool_search_response.json")) { + if (searchResponseIns != null) { + mockedSearchResponseString = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + } + } + try (InputStream searchResponseIns = 
AbstractRetrieverTool.class.getResourceAsStream("retrieval_tool_empty_search_response.json")) { + if (searchResponseIns != null) { + mockedEmptySearchResponseString = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + } + } + + client = mock(Client.class); + listener = mock(ActionListener.class); + RAGTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_QUERY); + + params = new HashMap<>(); + params.put(RAGTool.INDEX_FIELD, TEST_INDEX); + params.put(RAGTool.EMBEDDING_FIELD, TEST_EMBEDDING_FIELD); + params.put(RAGTool.SOURCE_FIELD, gson.toJson(TEST_SOURCE_FIELDS)); + params.put(RAGTool.EMBEDDING_MODEL_ID_FIELD, TEST_EMBEDDING_MODEL_ID); + params.put(RAGTool.INFERENCE_MODEL_ID_FIELD, TEST_INFERENCE_MODEL_ID); + params.put(RAGTool.DOC_SIZE_FIELD, AbstractRetrieverToolTests.TEST_DOC_SIZE.toString()); + params.put(VectorDBTool.K_FIELD, DEFAULT_K); + ragTool = RAGTool.Factory.getInstance().create(params); + } + + @Test + public void testValidate() { + assertTrue(ragTool.validate(Map.of(AbstractRetrieverTool.INPUT_FIELD, "hi"))); + assertFalse(ragTool.validate(Map.of(AbstractRetrieverTool.INPUT_FIELD, ""))); + assertFalse(ragTool.validate(Map.of(AbstractRetrieverTool.INPUT_FIELD, " "))); + assertFalse(ragTool.validate(Map.of("test", " "))); + assertFalse(ragTool.validate(new HashMap<>())); + assertFalse(ragTool.validate(null)); + } + + @Test + public void testGetAttributes() { + assertEquals(ragTool.getVersion(), null); + assertEquals(ragTool.getType(), RAGTool.TYPE); + assertEquals(ragTool.getIndex(), TEST_INDEX); + assertEquals(ragTool.getDocSize(), TEST_DOC_SIZE); + assertEquals(ragTool.getSourceFields(), TEST_SOURCE_FIELDS); + assertEquals(ragTool.getEmbeddingField(), TEST_EMBEDDING_FIELD); + assertEquals(ragTool.getEmbeddingModelId(), TEST_EMBEDDING_MODEL_ID); + assertEquals(ragTool.getK(), DEFAULT_K); + assertEquals(ragTool.getInferenceModelId(), TEST_INFERENCE_MODEL_ID); + } + + @Test + public void testSetName() { + 
assertEquals(ragTool.getName(), RAGTool.TYPE); + ragTool.setName("test-tool"); + assertEquals(ragTool.getName(), "test-tool"); + } + + @Test + public void testGetQueryBodySuccess() { + assertEquals(ragTool.getQueryBody(TEST_QUERY_TEXT), TEST_QUERY_TEXT); + } + + @Test + public void testOutputParser() throws IOException { + + NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + ragTool.setXContentRegistry(mockNamedXContentRegistry); + + ModelTensorOutput mlModelTensorOutput = getMlModelTensorOutput(); + SearchResponse mockedSearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedSearchResponseString) + ); + + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedSearchResponse); + return null; + }).when(client).search(any(), any()); + + doAnswer(invocation -> { + ActionListener actionListener = invocation.getArgument(2); + actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build()); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + + ragTool.setOutputParser(mockOutputParser); + ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + + verify(client).search(any(), any()); + verify(client).execute(any(), any(), any()); + } + + @Test + public void testRunWithEmptySearchResponse() throws IOException { + NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + ragTool.setXContentRegistry(mockNamedXContentRegistry); + + ModelTensorOutput mlModelTensorOutput = getMlModelTensorOutput(); + SearchResponse mockedEmptySearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, 
DeprecationHandler.IGNORE_DEPRECATIONS, mockedEmptySearchResponseString) + ); + + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedEmptySearchResponse); + return null; + }).when(client).search(any(), any()); + + doAnswer(invocation -> { + ActionListener actionListener = invocation.getArgument(2); + actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build()); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + verify(client).search(any(), any()); + verify(client).execute(any(), any(), any()); + } + + @Test + @SneakyThrows + public void testRunWithRuntimeExceptionDuringSearch() { + NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + ragTool.setXContentRegistry(mockNamedXContentRegistry); + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener actionListener = invocation.getArgument(1); + actionListener.onFailure(new RuntimeException("Failed to search index")); + return null; + }).when(client).search(any(), any()); + ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + verify(listener).onFailure(any(RuntimeException.class)); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(argumentCaptor.capture()); + assertEquals("Failed to search index", argumentCaptor.getValue().getMessage()); + } + + @Test + @SneakyThrows + public void testRunWithRuntimeExceptionDuringExecute() { + NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + ragTool.setXContentRegistry(mockNamedXContentRegistry); + + SearchResponse 
mockedSearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedSearchResponseString) + ); + + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedSearchResponse); + return null; + }).when(client).search(any(), any()); + + doAnswer(invocation -> { + ActionListener actionListener = invocation.getArgument(2); + actionListener.onFailure(new RuntimeException("Failed to run model " + TEST_INFERENCE_MODEL_ID)); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + + ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + verify(listener).onFailure(any(RuntimeException.class)); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(argumentCaptor.capture()); + assertEquals("Failed to run model " + TEST_INFERENCE_MODEL_ID, argumentCaptor.getValue().getMessage()); + } + + @Test(expected = IllegalArgumentException.class) + public void testRunWithEmptyInput() { + ActionListener listener = mock(ActionListener.class); + ragTool.run(Map.of(INPUT_FIELD, ""), listener); + } + + @Test + public void testRunWithMalformedInput() throws IOException { + ActionListener listener = mock(ActionListener.class); + ragTool.run(Map.of(INPUT_FIELD, "{hello?"), listener); + verify(listener).onFailure(any(RuntimeException.class)); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(argumentCaptor.capture()); + assertEquals("Failed to read question from " + INPUT_FIELD, argumentCaptor.getValue().getMessage()); + + } + + @Test + public void testFactory() { + RAGTool.Factory factoryMock = new RAGTool.Factory(); + RAGTool.Factory.getInstance().init(client, 
TEST_XCONTENT_REGISTRY_FOR_QUERY); + factoryMock.init(client, TEST_XCONTENT_REGISTRY_FOR_QUERY); + + String defaultDescription = factoryMock.getDefaultDescription(); + assertEquals(RAGTool.DEFAULT_DESCRIPTION, defaultDescription); + assertNotNull(RAGTool.Factory.getInstance()); + RAGTool rAGtool1 = factoryMock.create(params); + + RAGTool rAGtool2 = new RAGTool( + client, + TEST_XCONTENT_REGISTRY_FOR_QUERY, + TEST_INDEX, + TEST_EMBEDDING_FIELD, + TEST_SOURCE_FIELDS, + DEFAULT_K, + TEST_DOC_SIZE, + TEST_EMBEDDING_MODEL_ID, + TEST_INFERENCE_MODEL_ID + ); + + assertEquals(rAGtool1.getClient(), rAGtool2.getClient()); + assertEquals(rAGtool1.getK(), rAGtool2.getK()); + assertEquals(rAGtool1.getInferenceModelId(), rAGtool2.getInferenceModelId()); + assertEquals(rAGtool1.getName(), rAGtool2.getName()); + assertEquals(rAGtool1.getDocSize(), rAGtool2.getDocSize()); + assertEquals(rAGtool1.getIndex(), rAGtool2.getIndex()); + assertEquals(rAGtool1.getEmbeddingModelId(), rAGtool2.getEmbeddingModelId()); + assertEquals(rAGtool1.getEmbeddingField(), rAGtool2.getEmbeddingField()); + assertEquals(rAGtool1.getSourceFields(), rAGtool2.getSourceFields()); + assertEquals(rAGtool1.getXContentRegistry(), rAGtool2.getXContentRegistry()); + + } + + private static NamedXContentRegistry getNeuralQueryNamedXContentRegistry() { + QueryBuilder matchAllQueryBuilder = new MatchAllQueryBuilder(); + + List entries = new ArrayList<>(); + NamedXContentRegistry.Entry entry = new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField("neural"), (p, c) -> { + p.map(); + return matchAllQueryBuilder; + }); + entries.add(entry); + NamedXContentRegistry mockNamedXContentRegistry = new NamedXContentRegistry(entries); + return mockNamedXContentRegistry; + } + + private static ModelTensorOutput getMlModelTensorOutput() { + ModelTensor modelTensor = ModelTensor.builder().dataAsMap(ImmutableMap.of("thought", "thought 1", "action", "action1")).build(); + ModelTensors modelTensors = 
ModelTensors.builder().mlModelTensors(Arrays.asList(modelTensor)).build(); + ModelTensorOutput mlModelTensorOutput = ModelTensorOutput.builder().mlModelOutputs(Arrays.asList(modelTensors)).build(); + return mlModelTensorOutput; + } +} From d027d60b2ee17a7c3c2309a8a87624de06acc2c2 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 08:50:09 +0800 Subject: [PATCH 015/119] feat: search index tool (#61) (#77) * add search index tool * run spotless apply * remove unncessary string util operation * add test cases * spotless apply * update tool description and add model group search --------- (cherry picked from commit 2e053305ddf806e29c5913f31d4584dba1d8ad77) Signed-off-by: yuye-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../java/org/opensearch/agent/ToolPlugin.java | 2 + .../agent/tools/AbstractRetrieverTool.java | 18 +- .../agent/tools/SearchIndexTool.java | 172 ++++++++++++++++++ .../tools/AbstractRetrieverToolTests.java | 4 +- .../agent/tools/SearchIndexToolTests.java | 169 +++++++++++++++++ .../tools/retrieval_tool_search_response.json | 2 +- 6 files changed, 357 insertions(+), 10 deletions(-) create mode 100644 src/main/java/org/opensearch/agent/tools/SearchIndexTool.java create mode 100644 src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index 5ac1ce57..a411acbb 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -12,6 +12,7 @@ import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; +import org.opensearch.agent.tools.SearchIndexTool; import org.opensearch.agent.tools.VectorDBTool; import org.opensearch.agent.tools.VisualizationsTool; import org.opensearch.client.Client; @@ -60,6 +61,7 @@ 
public Collection createComponents( VisualizationsTool.Factory.getInstance().init(client); NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); VectorDBTool.Factory.getInstance().init(client, xContentRegistry); + SearchIndexTool.Factory.getInstance().init(client, xContentRegistry); return Collections.emptyList(); } diff --git a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java index dba48070..b2a0860c 100644 --- a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java +++ b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java @@ -66,6 +66,15 @@ protected AbstractRetrieverTool( protected abstract String getQueryBody(String queryText); + public static Map processResponse(SearchHit hit) { + Map docContent = new HashMap<>(); + docContent.put("_index", hit.getIndex()); + docContent.put("_id", hit.getId()); + docContent.put("_score", hit.getScore()); + docContent.put("_source", hit.getSourceAsMap()); + return docContent; + } + private SearchRequest buildSearchRequest(Map parameters) throws IOException { String question = parameters.get(INPUT_FIELD); if (StringUtils.isBlank(question)) { @@ -98,13 +107,8 @@ public void run(Map parameters, ActionListener listener) if (hits != null && hits.length > 0) { StringBuilder contextBuilder = new StringBuilder(); - for (int i = 0; i < hits.length; i++) { - SearchHit hit = hits[i]; - Map docContent = new HashMap<>(); - docContent.put("_index", hit.getIndex()); - docContent.put("_id", hit.getId()); - docContent.put("_score", hit.getScore()); - docContent.put("_source", hit.getSourceAsMap()); + for (SearchHit hit : hits) { + Map docContent = processResponse(hit); contextBuilder.append(gson.toJson(docContent)).append("\n"); } listener.onResponse((T) contextBuilder.toString()); diff --git a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java 
b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java new file mode 100644 index 00000000..5dd10759 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java @@ -0,0 +1,172 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.opensearch.ml.common.CommonValue.*; + +import java.security.AccessController; +import java.security.PrivilegedExceptionAction; +import java.util.Map; +import java.util.Objects; + +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.ml.common.transport.connector.MLConnectorSearchAction; +import org.opensearch.ml.common.transport.model.MLModelSearchAction; +import org.opensearch.ml.common.transport.model_group.MLModelGroupSearchAction; +import org.opensearch.ml.common.utils.StringUtils; +import org.opensearch.search.SearchHit; +import org.opensearch.search.builder.SearchSourceBuilder; + +import com.google.gson.JsonObject; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +@Getter +@Setter +@Log4j2 +@ToolAnnotation(SearchIndexTool.TYPE) +public class SearchIndexTool implements Tool { + + public static final String INPUT_FIELD = "input"; + public static final String INDEX_FIELD = "index"; + public static final String QUERY_FIELD = "query"; + + public static final String TYPE = "SearchIndexTool"; + private static final String DEFAULT_DESCRIPTION = + "Use this tool to search an index by 
providing two parameters: 'index' for the index name, and 'query' for the OpenSearch DSL formatted query."; + + private String name = TYPE; + + private String description = DEFAULT_DESCRIPTION; + + private Client client; + + private NamedXContentRegistry xContentRegistry; + + public SearchIndexTool(Client client, NamedXContentRegistry xContentRegistry) { + this.client = client; + this.xContentRegistry = xContentRegistry; + } + + @Override + public String getType() { + return TYPE; + } + + @Override + public String getVersion() { + return null; + } + + @Override + public boolean validate(Map parameters) { + return parameters != null && parameters.containsKey(INPUT_FIELD) && parameters.get(INPUT_FIELD) != null; + } + + @Override + public void run(Map parameters, ActionListener listener) { + try { + String input = parameters.get(INPUT_FIELD); + JsonObject jsonObject = StringUtils.gson.fromJson(input, JsonObject.class); + String index = jsonObject.get(INDEX_FIELD).getAsString(); + String query = jsonObject.get(QUERY_FIELD).toString(); + query = "{\"query\": " + query + "}"; + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + XContentParser queryParser = XContentType.JSON + .xContent() + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, query); + searchSourceBuilder.parseXContent(queryParser); + SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder).indices(index); + + ActionListener actionListener = ActionListener.wrap(r -> { + SearchHit[] hits = r.getHits().getHits(); + + if (hits != null && hits.length > 0) { + StringBuilder contextBuilder = new StringBuilder(); + for (SearchHit hit : hits) { + String doc = AccessController.doPrivileged((PrivilegedExceptionAction) () -> { + Map docContent = AbstractRetrieverTool.processResponse(hit); + return StringUtils.gson.toJson(docContent); + }); + contextBuilder.append(doc).append("\n"); + } + listener.onResponse((T) contextBuilder.toString()); + } else { + 
listener.onResponse((T) ""); + } + }, e -> { + log.error("Failed to search index", e); + listener.onFailure(e); + }); + + // since searching connector and model needs access control, we need + // to forward the request corresponding transport action + if (Objects.equals(index, ML_CONNECTOR_INDEX)) { + client.execute(MLConnectorSearchAction.INSTANCE, searchRequest, actionListener); + } else if (Objects.equals(index, ML_MODEL_INDEX)) { + client.execute(MLModelSearchAction.INSTANCE, searchRequest, actionListener); + } else if (Objects.equals(index, ML_MODEL_GROUP_INDEX)) { + client.execute(MLModelGroupSearchAction.INSTANCE, searchRequest, actionListener); + } else { + client.search(searchRequest, actionListener); + } + } catch (Exception e) { + log.error("Failed to search index", e); + listener.onFailure(e); + } + } + + public static class Factory implements Tool.Factory { + + private Client client; + private static Factory INSTANCE; + + private NamedXContentRegistry xContentRegistry; + + /** + * Create or return the singleton factory instance + */ + public static Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (SearchIndexTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new Factory(); + return INSTANCE; + } + } + + public void init(Client client, NamedXContentRegistry xContentRegistry) { + this.client = client; + this.xContentRegistry = xContentRegistry; + } + + @Override + public SearchIndexTool create(Map params) { + return new SearchIndexTool(client, xContentRegistry); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } +} diff --git a/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java index 5f5803f0..e55a2d8f 100644 --- a/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java +++ 
b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java @@ -98,7 +98,7 @@ public void testRunAsyncWithSearchResults() { future.join(); assertEquals( "{\"_index\":\"hybrid-index\",\"_source\":{\"passage_text\":\"Company test_mock have a history of 100 years.\"},\"_id\":\"1\",\"_score\":89.2917}\n" - + "{\"_index\":\"hybrid-index\",\"_source\":{\"passage_text\":\"the price of the api is 2$ per invokation\"},\"_id\":\"2\",\"_score\":0.10702579}\n", + + "{\"_index\":\"hybrid-index\",\"_source\":{\"passage_text\":\"the price of the api is 2$ per invocation\"},\"_id\":\"2\",\"_score\":0.10702579}\n", future.get() ); } @@ -218,7 +218,7 @@ public void testFactory() { // Create a mock object of the abstract Factory class Client client = mock(Client.class); AbstractRetrieverTool.Factory factoryMock = new AbstractRetrieverTool.Factory<>() { - public PPLTool create(Map params) { + public AbstractRetrieverTool create(Map params) { return null; } }; diff --git a/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java new file mode 100644 index 00000000..f90d2155 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java @@ -0,0 +1,169 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +import java.io.InputStream; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.xcontent.json.JsonXContent; +import 
org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.ParsingException; +import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.ml.common.transport.connector.MLConnectorSearchAction; +import org.opensearch.ml.common.transport.model.MLModelSearchAction; +import org.opensearch.search.SearchModule; + +import lombok.SneakyThrows; + +public class SearchIndexToolTests { + static public final NamedXContentRegistry TEST_XCONTENT_REGISTRY_FOR_QUERY = new NamedXContentRegistry( + new SearchModule(Settings.EMPTY, List.of()).getNamedXContents() + ); + + private Client client; + + private SearchIndexTool mockedSearchIndexTool; + + private String mockedSearchResponseString; + + @Before + @SneakyThrows + public void setup() { + client = mock(Client.class); + mockedSearchIndexTool = Mockito + .mock( + SearchIndexTool.class, + Mockito.withSettings().useConstructor(client, TEST_XCONTENT_REGISTRY_FOR_QUERY).defaultAnswer(Mockito.CALLS_REAL_METHODS) + ); + + try (InputStream searchResponseIns = SearchIndexTool.class.getResourceAsStream("retrieval_tool_search_response.json")) { + if (searchResponseIns != null) { + mockedSearchResponseString = new String(searchResponseIns.readAllBytes()); + } + } + } + + @Test + @SneakyThrows + public void testGetType() { + String type = mockedSearchIndexTool.getType(); + assertFalse(Strings.isNullOrEmpty(type)); + assertEquals("SearchIndexTool", type); + } + + @Test + @SneakyThrows + public void testValidate() { + Map parameters = Map.of("input", "{}"); + assertTrue(mockedSearchIndexTool.validate(parameters)); + } + + @Test + @SneakyThrows + public void testValidateWithEmptyInput() { + Map parameters = Map.of(); + assertFalse(mockedSearchIndexTool.validate(parameters)); + } + + @Test + public void testRunWithNormalIndex() { + String inputString = "{\"index\": \"test-index\", \"query\": {\"match_all\": 
{}}}"; + Map parameters = Map.of("input", inputString); + mockedSearchIndexTool.run(parameters, null); + Mockito.verify(client, times(1)).search(any(), any()); + Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); + } + + @Test + public void testRunWithConnectorIndex() { + String inputString = "{\"index\": \".plugins-ml-connector\", \"query\": {\"match_all\": {}}}"; + Map parameters = Map.of("input", inputString); + mockedSearchIndexTool.run(parameters, null); + Mockito.verify(client, never()).search(any(), any()); + Mockito.verify(client, times(1)).execute(eq(MLConnectorSearchAction.INSTANCE), any(), any()); + } + + @Test + public void testRunWithModelIndex() { + String inputString = "{\"index\": \".plugins-ml-model\", \"query\": {\"match_all\": {}}}"; + Map parameters = Map.of("input", inputString); + mockedSearchIndexTool.run(parameters, null); + Mockito.verify(client, never()).search(any(), any()); + Mockito.verify(client, times(1)).execute(eq(MLModelSearchAction.INSTANCE), any(), any()); + } + + @Test + @SneakyThrows + public void testRunWithSearchResults() { + SearchResponse mockedSearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedSearchResponseString) + ); + doAnswer(invocation -> { + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedSearchResponse); + return null; + }).when(client).search(any(), any()); + + String inputString = "{\"index\": \"test-index\", \"query\": {\"match_all\": {}}}"; + final CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(r -> { future.complete(r); }, e -> { future.completeExceptionally(e); }); + Map parameters = Map.of("input", inputString); + mockedSearchIndexTool.run(parameters, listener); + + future.join(); + + Mockito.verify(client, times(1)).search(any(), any()); + Mockito.verify(client, 
Mockito.never()).execute(any(), any(), any()); + } + + @Test + @SneakyThrows + public void testRunWithEmptyQuery() { + String inputString = "{\"index\": \"test_index\"}"; + Map parameters = Map.of("input", inputString); + ActionListener listener = mock(ActionListener.class); + mockedSearchIndexTool.run(parameters, listener); + Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); + Mockito.verify(client, Mockito.never()).search(any(), any()); + } + + @Test + public void testRunWithInvalidQuery() { + String inputString = "{\"index\": \"test-index\", \"query\": \"invalid query\"}"; + Map parameters = Map.of("input", inputString); + ActionListener listener = mock(ActionListener.class); + mockedSearchIndexTool.run(parameters, listener); + Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); + Mockito.verify(client, Mockito.never()).search(any(), any()); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(ParsingException.class); + // since error message for ParsingException is different, we only need to expect ParsingException to be thrown + verify(listener).onFailure(argumentCaptor.capture()); + } + + @Test + public void testFactory() { + SearchIndexTool searchIndexTool = SearchIndexTool.Factory.getInstance().create(Collections.emptyMap()); + assertEquals(SearchIndexTool.TYPE, searchIndexTool.getType()); + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json b/src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json index 7e66dd60..d89ad3b0 100644 --- a/src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json +++ b/src/test/resources/org/opensearch/agent/tools/retrieval_tool_search_response.json @@ -27,7 +27,7 @@ "_id": "2", "_score": 0.10702579, "_source": { - "passage_text": "the price of the api is 2$ per invokation" + "passage_text": "the price of the api is 2$ per invocation" } } ] From 2c861162df887f081e2e271e4c76a24d8e1f4722 
Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 12:53:03 +0800 Subject: [PATCH 016/119] Add tools to ToolPlugin (#81) (#82) (cherry picked from commit 018cd039bb98480eed623ff59d77a69018d7dd1c) Signed-off-by: Mingshi Liu Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../java/org/opensearch/agent/ToolPlugin.java | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index a411acbb..e78b2550 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -12,7 +12,12 @@ import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; +import org.opensearch.agent.tools.RAGTool; +import org.opensearch.agent.tools.SearchAlertsTool; +import org.opensearch.agent.tools.SearchAnomalyDetectorsTool; +import org.opensearch.agent.tools.SearchAnomalyResultsTool; import org.opensearch.agent.tools.SearchIndexTool; +import org.opensearch.agent.tools.SearchMonitorsTool; import org.opensearch.agent.tools.VectorDBTool; import org.opensearch.agent.tools.VisualizationsTool; import org.opensearch.client.Client; @@ -62,6 +67,11 @@ public Collection createComponents( NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); VectorDBTool.Factory.getInstance().init(client, xContentRegistry); SearchIndexTool.Factory.getInstance().init(client, xContentRegistry); + RAGTool.Factory.getInstance().init(client, xContentRegistry); + SearchAlertsTool.Factory.getInstance().init(client); + SearchAnomalyDetectorsTool.Factory.getInstance().init(client); + SearchAnomalyResultsTool.Factory.getInstance().init(client); + SearchMonitorsTool.Factory.getInstance().init(client); return Collections.emptyList(); } @@ -72,7 +82,13 @@ public 
List> getToolFactories() { PPLTool.Factory.getInstance(), NeuralSparseSearchTool.Factory.getInstance(), VectorDBTool.Factory.getInstance(), - VisualizationsTool.Factory.getInstance() + VisualizationsTool.Factory.getInstance(), + SearchIndexTool.Factory.getInstance(), + RAGTool.Factory.getInstance(), + SearchAlertsTool.Factory.getInstance(), + SearchAnomalyDetectorsTool.Factory.getInstance(), + SearchAnomalyResultsTool.Factory.getInstance(), + SearchMonitorsTool.Factory.getInstance() ); } } From 64c3804c0902576de8ccc0f0d5d8eab4f4bce3fa Mon Sep 17 00:00:00 2001 From: zane-neo Date: Fri, 29 Dec 2023 20:59:20 +0800 Subject: [PATCH 017/119] Fix IT run error by adding job-scheduler back to zipArchive dependency (#87) Signed-off-by: zane-neo --- build.gradle | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/build.gradle b/build.gradle index 658942a8..2a7ba02f 100644 --- a/build.gradle +++ b/build.gradle @@ -78,7 +78,7 @@ configurations { all { resolutionStrategy { force "org.mockito:mockito-core:${versions.mockito}" - force "com.google.guava:guava:32.1.3-jre" // CVE for 31.1 + force "com.google.guava:guava:33.0.0-jre" // CVE for 31.1 force("org.eclipse.platform:org.eclipse.core.runtime:3.30.0") // CVE for < 3.29.0, forces JDK17 for spotless } } @@ -108,7 +108,7 @@ dependencies { compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.10.1' compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0" compileOnly group: 'org.json', name: 'json', version: '20231013' - compileOnly("com.google.guava:guava:32.1.3-jre") + compileOnly("com.google.guava:guava:33.0.0-jre") compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.10' compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.10.0' @@ -122,8 +122,7 @@ dependencies { // ZipArchive dependencies used for integration tests zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${version}" - //JS plugin is published to 
`org/opensearch` instead of `org/opensearch/plugin` under local maven repo: https://mvnrepository.com/artifact/org.opensearch/opensearch-job-scheduler. - zipArchive group: 'org.opensearch', name:'opensearch-job-scheduler', version: "${version}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${version}" zipArchive "org.opensearch.plugin:opensearch-anomaly-detection:${version}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" @@ -364,4 +363,4 @@ task updateVersion { // Include the required files that needs to be updated with new Version ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) } -} \ No newline at end of file +} From 1de3b937b68b8449f9354527b3a648ce8c6020ef Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 16:39:38 +0800 Subject: [PATCH 018/119] [Backport 2.x] Base class for Integ test; add integ test for NeuralSparseSearchTool (#91) * Base class for Integ test; add integ test for NeuralSparseSearchTool (#86) * add common components Signed-off-by: zhichao-aws * add common components Signed-off-by: zhichao-aws * add basic components Signed-off-by: zhichao-aws * rebase main Signed-off-by: zhichao-aws * add basic components for it, add it Signed-off-by: zhichao-aws * rebase main Signed-off-by: zhichao-aws * tidy Signed-off-by: zhichao-aws * change neural sparse model to pretrained tokenizer Signed-off-by: zhichao-aws * rm redundant line Signed-off-by: zhichao-aws * add comments Signed-off-by: zhichao-aws * tidy Signed-off-by: zhichao-aws * add register connector Signed-off-by: zhichao-aws --------- Signed-off-by: zhichao-aws (cherry picked from commit 34ae75f81ed47ecc2e4006d32343e12fa388678b) Signed-off-by: github-actions[bot] * fix http dependency 
Signed-off-by: zhichao-aws * tidy Signed-off-by: zhichao-aws * fix read http entity twice Signed-off-by: zhichao-aws --------- Signed-off-by: zhichao-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: zhichao-aws --- build.gradle | 22 ++ .../integTest/BaseAgentToolsIT.java | 242 ++++++++++++++++++ .../integTest/NeuralSparseSearchToolIT.java | 161 ++++++++++++ .../OpenSearchSecureRestTestCase.java | 163 ++++++++++++ ...eural_sparse_search_tool_request_body.json | 17 ++ ...er_sparse_encoding_model_request_body.json | 5 + 6 files changed, 610 insertions(+) create mode 100644 src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java create mode 100644 src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java create mode 100644 src/test/java/org/opensearch/integTest/OpenSearchSecureRestTestCase.java create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json create mode 100644 src/test/resources/org/opensearch/agent/tools/register_sparse_encoding_model_request_body.json diff --git a/build.gradle b/build.gradle index 2a7ba02f..f72a66a8 100644 --- a/build.gradle +++ b/build.gradle @@ -125,6 +125,8 @@ dependencies { zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${version}" zipArchive "org.opensearch.plugin:opensearch-anomaly-detection:${version}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-knn', version: "${version}" + zipArchive group: 'org.opensearch.plugin', name:'neural-search', version: "${version}" // Test dependencies testImplementation "org.opensearch.test:framework:${opensearch_version}" @@ -348,6 +350,26 @@ testClusters.integTest { } } +// Remote Integration Tests +task integTestRemote(type: RestIntegTestTask) { + testClassesDirs = sourceSets.test.output.classesDirs + classpath = 
sourceSets.test.runtimeClasspath + + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + systemProperty 'cluster.number_of_nodes', "${_numNodes}" + + systemProperty 'tests.security.manager', 'false' + // Run tests with remote cluster only if rest case is defined + if (System.getProperty("tests.rest.cluster") != null) { + filter { + includeTestsMatching "org.opensearch.integTest.*IT" + } + } +} + // Automatically sets up the integration test cluster locally run { useCluster testClusters.integTest diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java new file mode 100644 index 00000000..dfd0d0a1 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -0,0 +1,242 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.lang3.StringUtils; +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHeaders; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.message.BasicHeader; +import org.apache.http.util.EntityUtils; +import org.junit.Before; +import org.opensearch.client.*; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.ml.common.MLModel; +import org.opensearch.ml.common.MLTask; +import org.opensearch.ml.common.MLTaskState; +import org.opensearch.ml.common.input.execute.agent.AgentMLInput; +import 
org.opensearch.ml.common.output.model.ModelTensor; +import org.opensearch.ml.common.output.model.ModelTensorOutput; +import org.opensearch.ml.common.output.model.ModelTensors; + +import com.google.common.collect.ImmutableList; +import com.google.gson.Gson; + +import lombok.SneakyThrows; + +public abstract class BaseAgentToolsIT extends OpenSearchSecureRestTestCase { + public static final Gson gson = new Gson(); + private static final int MAX_TASK_RESULT_QUERY_TIME_IN_SECOND = 60 * 5; + private static final int DEFAULT_TASK_RESULT_QUERY_INTERVAL_IN_MILLISECOND = 1000; + + /** + * Update cluster settings to run ml models + */ + @Before + public void updateClusterSettings() { + updateClusterSettings("plugins.ml_commons.only_run_on_ml_node", false); + // default threshold for native circuit breaker is 90, it may be not enough on test runner machine + updateClusterSettings("plugins.ml_commons.native_memory_threshold", 100); + updateClusterSettings("plugins.ml_commons.allow_registering_model_via_url", true); + } + + @SneakyThrows + protected void updateClusterSettings(String settingKey, Object value) { + XContentBuilder builder = XContentFactory + .jsonBuilder() + .startObject() + .startObject("persistent") + .field(settingKey, value) + .endObject() + .endObject(); + Response response = makeRequest( + client(), + "PUT", + "_cluster/settings", + null, + builder.toString(), + ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "")) + ); + + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + + @SneakyThrows + private Map parseResponseToMap(Response response) { + Map responseInMap = XContentHelper + .convertToMap(XContentType.JSON.xContent(), EntityUtils.toString(response.getEntity()), false); + return responseInMap; + } + + @SneakyThrows + private Object parseFieldFromResponse(Response response, String field) { + assertNotNull(field); + Map map = parseResponseToMap(response); + Object result = map.get(field); + 
assertNotNull(result); + return result; + } + + protected String createConnector(String requestBody) { + Response response = makeRequest(client(), "POST", "/_plugins/_ml/connectors/_create", null, requestBody, null); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + return parseFieldFromResponse(response, MLModel.CONNECTOR_ID_FIELD).toString(); + } + + protected String registerModel(String requestBody) { + Response response = makeRequest(client(), "POST", "/_plugins/_ml/models/_register", null, requestBody, null); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + return parseFieldFromResponse(response, MLTask.TASK_ID_FIELD).toString(); + } + + protected String deployModel(String modelId) { + Response response = makeRequest(client(), "POST", "/_plugins/_ml/models/" + modelId + "/_deploy", null, (String) null, null); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + return parseFieldFromResponse(response, MLTask.TASK_ID_FIELD).toString(); + } + + @SneakyThrows + protected Map waitTaskComplete(String taskId) { + for (int i = 0; i < MAX_TASK_RESULT_QUERY_TIME_IN_SECOND; i++) { + Response response = makeRequest(client(), "GET", "/_plugins/_ml/tasks/" + taskId, null, (String) null, null); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + Map responseInMap = parseResponseToMap(response); + String state = responseInMap.get(MLTask.STATE_FIELD).toString(); + if (state.equals(MLTaskState.COMPLETED.toString())) { + return responseInMap; + } + if (state.equals(MLTaskState.FAILED.toString()) + || state.equals(MLTaskState.CANCELLED.toString()) + || state.equals(MLTaskState.COMPLETED_WITH_ERROR.toString())) { + fail("The task failed with state " + state); + } + Thread.sleep(DEFAULT_TASK_RESULT_QUERY_INTERVAL_IN_MILLISECOND); + } + fail("The task failed to complete after " + 
MAX_TASK_RESULT_QUERY_TIME_IN_SECOND + " seconds."); + return null; + } + + // Register the model then deploy it. Returns the model_id until the model is deployed + protected String registerModelThenDeploy(String requestBody) { + String registerModelTaskId = registerModel(requestBody); + Map registerTaskResponseInMap = waitTaskComplete(registerModelTaskId); + String modelId = registerTaskResponseInMap.get(MLTask.MODEL_ID_FIELD).toString(); + String deployModelTaskId = deployModel(modelId); + waitTaskComplete(deployModelTaskId); + return modelId; + } + + protected void createIndexWithConfiguration(String indexName, String indexConfiguration) throws Exception { + Response response = makeRequest(client(), "PUT", indexName, null, indexConfiguration, null); + Map responseInMap = parseResponseToMap(response); + assertEquals("true", responseInMap.get("acknowledged").toString()); + assertEquals(indexName, responseInMap.get("index").toString()); + } + + @SneakyThrows + protected void addDocToIndex(String indexName, String docId, List fieldNames, List fieldContents) { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + for (int i = 0; i < fieldNames.size(); i++) { + builder.field(fieldNames.get(i), fieldContents.get(i)); + } + builder.endObject(); + Response response = makeRequest( + client(), + "POST", + "/" + indexName + "/_doc/" + docId + "?refresh=true", + null, + builder.toString(), + null + ); + assertEquals(RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + + public String createAgent(String requestBody) { + Response response = makeRequest(client(), "POST", "/_plugins/_ml/agents/_register", null, requestBody, null); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + return parseFieldFromResponse(response, AgentMLInput.AGENT_ID_FIELD).toString(); + } + + private String parseStringResponseFromExecuteAgentResponse(Response response) { + Map responseInMap = 
parseResponseToMap(response); + Optional optionalResult = Optional + .ofNullable(responseInMap) + .map(m -> (List) m.get(ModelTensorOutput.INFERENCE_RESULT_FIELD)) + .map(l -> (Map) l.get(0)) + .map(m -> (List) m.get(ModelTensors.OUTPUT_FIELD)) + .map(l -> (Map) l.get(0)) + .map(m -> (String) (m.get(ModelTensor.RESULT_FIELD))); + return optionalResult.get(); + } + + // execute the agent, and return the String response from the json structure + // {"inference_results": [{"output": [{"name": "response","result": "the result to return."}]}]} + public String executeAgent(String agentId, String requestBody) { + Response response = makeRequest(client(), "POST", "/_plugins/_ml/agents/" + agentId + "/_execute", null, requestBody, null); + return parseStringResponseFromExecuteAgentResponse(response); + } + + public static Response makeRequest( + RestClient client, + String method, + String endpoint, + Map params, + String jsonEntity, + List
headers + ) { + HttpEntity httpEntity = StringUtils.isBlank(jsonEntity) ? null : new StringEntity(jsonEntity, ContentType.APPLICATION_JSON); + return makeRequest(client, method, endpoint, params, httpEntity, headers); + } + + public static Response makeRequest( + RestClient client, + String method, + String endpoint, + Map params, + HttpEntity entity, + List
headers + ) { + return makeRequest(client, method, endpoint, params, entity, headers, false); + } + + @SneakyThrows + public static Response makeRequest( + RestClient client, + String method, + String endpoint, + Map params, + HttpEntity entity, + List
headers, + boolean strictDeprecationMode + ) { + Request request = new Request(method, endpoint); + + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + if (headers != null) { + headers.forEach(header -> options.addHeader(header.getName(), header.getValue())); + } + options.setWarningsHandler(strictDeprecationMode ? WarningsHandler.STRICT : WarningsHandler.PERMISSIVE); + request.setOptions(options.build()); + + if (params != null) { + params.forEach(request::addParameter); + } + if (entity != null) { + request.setEntity(entity); + } + return client.performRequest(request); + } +} diff --git a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java new file mode 100644 index 00000000..2dda2095 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java @@ -0,0 +1,161 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.client.ResponseException; + +import lombok.SneakyThrows; + +public class NeuralSparseSearchToolIT extends BaseAgentToolsIT { + public static String TEST_INDEX_NAME = "test_index"; + + private String modelId; + private String registerAgentRequestBody; + + @SneakyThrows + private void prepareModel() { + String requestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_sparse_encoding_model_request_body.json") + .toURI() + ) + ); + modelId = registerModelThenDeploy(requestBody); + } + + @SneakyThrows + private void prepareIndex() { + createIndexWithConfiguration( + TEST_INDEX_NAME, + "{\n" + + 
" \"mappings\": {\n" + + " \"properties\": {\n" + + " \"text\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"embedding\": {\n" + + " \"type\": \"rank_features\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(TEST_INDEX_NAME, "0", List.of("text", "embedding"), List.of("text doc 1", Map.of("hello", 1, "world", 2))); + addDocToIndex(TEST_INDEX_NAME, "1", List.of("text", "embedding"), List.of("text doc 2", Map.of("a", 3, "b", 4))); + addDocToIndex(TEST_INDEX_NAME, "2", List.of("text", "embedding"), List.of("text doc 3", Map.of("test", 5, "a", 6))); + } + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + prepareModel(); + prepareIndex(); + registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json") + .toURI() + ) + ); + registerAgentRequestBody = registerAgentRequestBody.replace("", modelId); + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + } + + public void testNeuralSparseSearchToolInFlowAgent() { + String agentId = createAgent(registerAgentRequestBody); + // successful case + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + assertEquals( + "The agent execute response not equal with expected.", + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 3\"},\"_id\":\"2\",\"_score\":2.4136734}\n" + + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 2\"},\"_id\":\"1\",\"_score\":1.2068367}\n", + result + ); + + // use non-exist token to test the case the tool can not find match docs. 
+ String result2 = executeAgent(agentId, "{\"parameters\": {\"question\": \"c\"}}"); + assertEquals("The agent execute response not equal with expected.", "Can not get any match from search result.", result2); + + // if blank input, call onFailure and get exception + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("[input] is null or empty, can not process it."), containsString("illegal_argument_exception")) + ); + } + + public void testNeuralSparseSearchToolInFlowAgent_withIllegalSourceField_thenGetEmptySource() { + String agentId = createAgent(registerAgentRequestBody.replace("text", "text2")); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + assertEquals( + "The agent execute response not equal with expected.", + "{\"_index\":\"test_index\",\"_source\":{},\"_id\":\"2\",\"_score\":2.4136734}\n" + + "{\"_index\":\"test_index\",\"_source\":{},\"_id\":\"1\",\"_score\":1.2068367}\n", + result + ); + } + + public void testNeuralSparseSearchToolInFlowAgent_withIllegalEmbeddingField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody.replace("\"embedding\"", "\"embedding2\"")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString("failed to create query: [neural_sparse] query only works on [rank_features] fields"), + containsString("search_phase_execution_exception") + ) + ); + } + + public void testNeuralSparseSearchToolInFlowAgent_withIllegalIndexField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody.replace("test_index", "test_index2")); + Exception exception = assertThrows(ResponseException.class, () -> 
executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("no such index [test_index2]"), containsString("index_not_found_exception")) + ); + } + + public void testNeuralSparseSearchToolInFlowAgent_withIllegalModelIdField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody.replace(modelId, "test_model_id")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("status_exception"))); + } +} diff --git a/src/test/java/org/opensearch/integTest/OpenSearchSecureRestTestCase.java b/src/test/java/org/opensearch/integTest/OpenSearchSecureRestTestCase.java new file mode 100644 index 00000000..2838f1f2 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/OpenSearchSecureRestTestCase.java @@ -0,0 +1,163 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.apache.http.Header; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.message.BasicHeader; +import org.apache.http.ssl.SSLContextBuilder; +import org.junit.After; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.RestClient; +import org.opensearch.client.RestClientBuilder; +import 
org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.test.rest.OpenSearchRestTestCase; + +/** + * Base class for running the integration tests on a secure cluster. The plugin IT test should either extend this + * class or create another base class by extending this class to make sure that their IT can be run on secure clusters. + */ +public abstract class OpenSearchSecureRestTestCase extends OpenSearchRestTestCase { + + private static final String PROTOCOL_HTTP = "http"; + private static final String PROTOCOL_HTTPS = "https"; + private static final String SYS_PROPERTY_KEY_HTTPS = "https"; + private static final String SYS_PROPERTY_KEY_CLUSTER_ENDPOINT = "tests.rest.cluster"; + private static final String SYS_PROPERTY_KEY_USER = "user"; + private static final String SYS_PROPERTY_KEY_PASSWORD = "password"; + private static final String DEFAULT_SOCKET_TIMEOUT = "60s"; + private static final String INTERNAL_INDICES_PREFIX = "."; + private static String protocol; + + @Override + protected String getProtocol() { + if (protocol == null) { + protocol = readProtocolFromSystemProperty(); + } + return protocol; + } + + private String readProtocolFromSystemProperty() { + final boolean isHttps = Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_HTTPS)).map("true"::equalsIgnoreCase).orElse(false); + if (!isHttps) { + return PROTOCOL_HTTP; + } + + // currently only external cluster is supported for security enabled testing + if (Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_CLUSTER_ENDPOINT)).isEmpty()) { + throw new RuntimeException("cluster url should be provided for security enabled testing"); + } + return PROTOCOL_HTTPS; 
+ } + + @Override + protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { + final RestClientBuilder builder = RestClient.builder(hosts); + if (PROTOCOL_HTTPS.equals(getProtocol())) { + configureHttpsClient(builder, settings); + } else { + configureClient(builder, settings); + } + + return builder.build(); + } + + private void configureHttpsClient(final RestClientBuilder builder, final Settings settings) { + final Map headers = ThreadContext.buildDefaultHeaders(settings); + final Header[] defaultHeaders = new Header[headers.size()]; + int i = 0; + for (Map.Entry entry : headers.entrySet()) { + defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue()); + } + builder.setDefaultHeaders(defaultHeaders); + builder.setHttpClientConfigCallback(httpClientBuilder -> { + final String userName = Optional + .ofNullable(System.getProperty(SYS_PROPERTY_KEY_USER)) + .orElseThrow(() -> new RuntimeException("user name is missing")); + final String password = Optional + .ofNullable(System.getProperty(SYS_PROPERTY_KEY_PASSWORD)) + .orElseThrow(() -> new RuntimeException("password is missing")); + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password)); + try { + return httpClientBuilder + .setDefaultCredentialsProvider(credentialsProvider) + // disable the certificate since our testing cluster just uses the default security configuration + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) + .setSSLContext(SSLContextBuilder.create().loadTrustMaterial(null, (chains, authType) -> true).build()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT); + final TimeValue socketTimeout = TimeValue + .parseTimeValue(socketTimeoutString == null ? 
DEFAULT_SOCKET_TIMEOUT : socketTimeoutString, CLIENT_SOCKET_TIMEOUT); + builder.setRequestConfigCallback(conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis()))); + if (settings.hasValue(CLIENT_PATH_PREFIX)) { + builder.setPathPrefix(settings.get(CLIENT_PATH_PREFIX)); + } + } + + /** + * wipeAllIndices won't work since it cannot delete security index. Use deleteExternalIndices instead. + */ + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @After + public void deleteExternalIndices() throws IOException { + final Response response = client().performRequest(new Request("GET", "/_cat/indices?format=json" + "&expand_wildcards=all")); + try ( + final XContentParser parser = JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.getEntity().getContent() + ) + ) { + final XContentParser.Token token = parser.nextToken(); + final List> parserList; + if (token == XContentParser.Token.START_ARRAY) { + parserList = parser.listOrderedMap().stream().map(obj -> (Map) obj).collect(Collectors.toList()); + } else { + parserList = Collections.singletonList(parser.mapOrdered()); + } + + final List externalIndices = parserList + .stream() + .map(index -> (String) index.get("index")) + .filter(indexName -> indexName != null) + .filter(indexName -> !indexName.startsWith(INTERNAL_INDICES_PREFIX)) + .collect(Collectors.toList()); + + for (final String indexName : externalIndices) { + adminClient().performRequest(new Request("DELETE", "/" + indexName)); + } + } + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json new file mode 100644 index 00000000..ac2a2987 --- /dev/null +++ 
b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json @@ -0,0 +1,17 @@ +{ + "name": "Test_Neural_Sparse_Agent_For_RAG", + "type": "flow", + "tools": [ + { + "type": "NeuralSparseSearchTool", + "parameters": { + "description":"user this tool to search data from the test index", + "model_id": "", + "index": "test_index", + "embedding_field": "embedding", + "source_field": ["text"], + "input": "${parameters.question}" + } + } + ] +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/register_sparse_encoding_model_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_sparse_encoding_model_request_body.json new file mode 100644 index 00000000..8eb7901c --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_sparse_encoding_model_request_body.json @@ -0,0 +1,5 @@ +{ + "name":"amazon/neural-sparse/opensearch-neural-sparse-tokenizer-v1", + "version":"1.0.1", + "model_format": "TORCH_SCRIPT" +} \ No newline at end of file From 3e73b7bf0d399668e8ddc31244244ee19159ff7d Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 19:38:49 +0800 Subject: [PATCH 019/119] fix: some parsing exception from search index tool. 
(#93) (#95) * add two more parsing method for search index tool * fix unit test failure * add two unit cases * add unit test case for model group index * add unit test case for model group index * remove unnecessary logs --------- (cherry picked from commit 8466e91816d831e3554a04c424e3ff5093039180) Signed-off-by: yuye-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../agent/tools/SearchIndexTool.java | 31 ++++++++++++---- .../agent/tools/SearchIndexToolTests.java | 37 ++++++++++++++++--- 2 files changed, 54 insertions(+), 14 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java index 5dd10759..ff34718e 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java @@ -7,6 +7,7 @@ import static org.opensearch.ml.common.CommonValue.*; +import java.io.IOException; import java.security.AccessController; import java.security.PrivilegedExceptionAction; import java.util.Map; @@ -77,6 +78,13 @@ public boolean validate(Map parameters) { return parameters != null && parameters.containsKey(INPUT_FIELD) && parameters.get(INPUT_FIELD) != null; } + private SearchRequest getSearchRequest(String index, String query) throws IOException { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + XContentParser queryParser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, query); + searchSourceBuilder.parseXContent(queryParser); + return new SearchRequest().source(searchSourceBuilder).indices(index); + } + @Override public void run(Map parameters, ActionListener listener) { try { @@ -84,14 +92,21 @@ public void run(Map parameters, ActionListener listener) JsonObject jsonObject = StringUtils.gson.fromJson(input, JsonObject.class); String index = jsonObject.get(INDEX_FIELD).getAsString(); String query = 
jsonObject.get(QUERY_FIELD).toString(); - query = "{\"query\": " + query + "}"; - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - XContentParser queryParser = XContentType.JSON - .xContent() - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, query); - searchSourceBuilder.parseXContent(queryParser); - SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder).indices(index); + + SearchRequest searchRequest; + try { + searchRequest = getSearchRequest(index, query); + } catch (Exception e1) { + try { + // try different json parsing method + query = jsonObject.get(QUERY_FIELD).getAsString(); + searchRequest = getSearchRequest(index, query); + } catch (Exception e2) { + // try wrapped query + query = "{\"query\": " + query + "}"; + searchRequest = getSearchRequest(index, query); + } + } ActionListener actionListener = ActionListener.wrap(r -> { SearchHit[] hits = r.getHits().getHits(); diff --git a/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java index f90d2155..6de9fbfc 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java @@ -17,19 +17,18 @@ import org.junit.Before; import org.junit.Test; -import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.ParsingException; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.ml.common.transport.connector.MLConnectorSearchAction; import 
org.opensearch.ml.common.transport.model.MLModelSearchAction; +import org.opensearch.ml.common.transport.model_group.MLModelGroupSearchAction; import org.opensearch.search.SearchModule; import lombok.SneakyThrows; @@ -86,7 +85,7 @@ public void testValidateWithEmptyInput() { @Test public void testRunWithNormalIndex() { - String inputString = "{\"index\": \"test-index\", \"query\": {\"match_all\": {}}}"; + String inputString = "{\"index\": \"test-index\", \"query\": {\"query\": {\"match_all\": {}}}}"; Map parameters = Map.of("input", inputString); mockedSearchIndexTool.run(parameters, null); Mockito.verify(client, times(1)).search(any(), any()); @@ -111,6 +110,15 @@ public void testRunWithModelIndex() { Mockito.verify(client, times(1)).execute(eq(MLModelSearchAction.INSTANCE), any(), any()); } + @Test + public void testRunWithModelGroupIndex() { + String inputString = "{\"index\": \".plugins-ml-model-group\", \"query\": {\"match_all\": {}}}"; + Map parameters = Map.of("input", inputString); + mockedSearchIndexTool.run(parameters, null); + Mockito.verify(client, never()).search(any(), any()); + Mockito.verify(client, times(1)).execute(eq(MLModelGroupSearchAction.INSTANCE), any(), any()); + } + @Test @SneakyThrows public void testRunWithSearchResults() { @@ -156,9 +164,26 @@ public void testRunWithInvalidQuery() { mockedSearchIndexTool.run(parameters, listener); Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); Mockito.verify(client, Mockito.never()).search(any(), any()); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(ParsingException.class); - // since error message for ParsingException is different, we only need to expect ParsingException to be thrown - verify(listener).onFailure(argumentCaptor.capture()); + } + + @Test + public void testRunWithEmptyQueryBody() { + // this empty query should be parsed with jsonObject.get(QUERY_FIELD).getAsString(); + String inputString = "{\"index\": \"test-index\", \"query\": \"{}\"}"; + Map parameters 
= Map.of("input", inputString); + mockedSearchIndexTool.run(parameters, null); + Mockito.verify(client, times(1)).search(any(), any()); + Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); + } + + @Test + public void testRunWithWrappedQuery() { + // this query should be wrapped liked "{\"query\": " + query + "}" + String inputString = "{\"index\": \".plugins-ml-model\", \"query\": {\"match_all\": {}}}"; + Map parameters = Map.of("input", inputString); + mockedSearchIndexTool.run(parameters, null); + Mockito.verify(client, never()).search(any(), any()); + Mockito.verify(client, times(1)).execute(eq(MLModelSearchAction.INSTANCE), any(), any()); } @Test From acc1d32b8c3423b18361e7e79ef14afcc1c99472 Mon Sep 17 00:00:00 2001 From: Tyler Ohlsen Date: Tue, 2 Jan 2024 16:31:33 -0800 Subject: [PATCH 020/119] [Backport 2.x] Integrate search detectors tool with profile API (#88) (#97) Signed-off-by: Tyler Ohlsen --- build.gradle | 2 + .../tools/SearchAnomalyDetectorsTool.java | 95 ++++- .../agent/tools/utils/ToolConstants.java | 19 + .../org/opensearch/agent/TestHelpers.java | 64 ++++ .../SearchAnomalyDetectorsToolTests.java | 339 +++++++++++++++--- 5 files changed, 467 insertions(+), 52 deletions(-) create mode 100644 src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java create mode 100644 src/test/java/org/opensearch/agent/TestHelpers.java diff --git a/build.gradle b/build.gradle index f72a66a8..d4eef32d 100644 --- a/build.gradle +++ b/build.gradle @@ -119,6 +119,8 @@ dependencies { implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${version}.jar", "ppl-${version}.jar", "protocol-${version}.jar"]) compileOnly "org.opensearch:common-utils:${version}" compileOnly "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" + compileOnly "org.opensearch:opensearch-job-scheduler-spi:${version}" + // ZipArchive dependencies used for integration tests zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: 
"${version}" diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index de397521..d71814dc 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -6,14 +6,23 @@ package org.opensearch.agent.tools; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.client.AnomalyDetectionNodeClient; +import org.opensearch.ad.model.ADTask; +import org.opensearch.ad.transport.GetAnomalyDetectorRequest; +import org.opensearch.ad.transport.GetAnomalyDetectorResponse; +import org.opensearch.agent.tools.utils.ToolConstants.DetectorStateString; import org.opensearch.client.Client; +import org.opensearch.common.lucene.uid.Versions; import org.opensearch.core.action.ActionListener; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -123,24 +132,92 @@ public void run(Map parameters, ActionListener listener) SearchRequest searchDetectorRequest = new SearchRequest().source(searchSourceBuilder); - if (running != null || disabled != null || failed != null) { - // TODO: add a listener to trigger when the first response is received, to trigger the profile API call - // to fetch the detector state, etc. - // Will need AD client to onboard the profile API first. 
- } - ActionListener searchDetectorListener = ActionListener.wrap(response -> { StringBuilder sb = new StringBuilder(); - SearchHit[] hits = response.getHits().getHits(); + List hits = Arrays.asList(response.getHits().getHits()); + Map hitsAsMap = hits.stream().collect(Collectors.toMap(SearchHit::getId, hit -> hit)); + + // If we need to filter by detector state, make subsequent profile API calls to each detector + if (running != null || disabled != null || failed != null) { + List> profileFutures = new ArrayList<>(); + for (SearchHit hit : hits) { + CompletableFuture profileFuture = new CompletableFuture() + .orTimeout(30000, TimeUnit.MILLISECONDS); + profileFutures.add(profileFuture); + ActionListener profileListener = ActionListener + .wrap(profileResponse -> { + profileFuture.complete(profileResponse); + }, e -> { + log.error("Failed to get anomaly detector profile.", e); + profileFuture.completeExceptionally(e); + listener.onFailure(e); + }); + + GetAnomalyDetectorRequest profileRequest = new GetAnomalyDetectorRequest( + hit.getId(), + Versions.MATCH_ANY, + true, + true, + "", + "", + false, + null + ); + adClient.getDetectorProfile(profileRequest, profileListener); + } + + List profileResponses = new ArrayList<>(); + try { + CompletableFuture> listFuture = CompletableFuture + .allOf(profileFutures.toArray(new CompletableFuture[0])) + .thenApply(v -> profileFutures.stream().map(CompletableFuture::join).collect(Collectors.toList())); + profileResponses = listFuture.join(); + } catch (Exception e) { + log.error("Failed to get all anomaly detector profiles.", e); + listener.onFailure(e); + } + + for (GetAnomalyDetectorResponse profileResponse : profileResponses) { + if (profileResponse != null && profileResponse.getDetector() != null) { + String detectorId = profileResponse.getDetector().getDetectorId(); + + // We follow the existing logic as the frontend to determine overall detector state + // 
https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/blob/main/server/routes/utils/adHelpers.ts#L437 + String detectorState = DetectorStateString.Disabled.name(); + ADTask realtimeTask = profileResponse.getRealtimeAdTask(); + + if (realtimeTask != null) { + String taskState = realtimeTask.getState(); + if (taskState.equalsIgnoreCase("CREATED")) { + detectorState = DetectorStateString.Initializing.name(); + } else if (taskState.equalsIgnoreCase("RUNNING")) { + detectorState = DetectorStateString.Running.name(); + } else if (taskState.equalsIgnoreCase("INIT_FAILURE") + || taskState.equalsIgnoreCase("UNEXPECTED_FAILURE") + || taskState.equalsIgnoreCase("FAILED")) { + detectorState = DetectorStateString.Failed.name(); + } + } + + if ((Boolean.FALSE.equals(running) && detectorState.equals(DetectorStateString.Running.name())) + || (Boolean.FALSE.equals(disabled) && detectorState.equals(DetectorStateString.Disabled.name())) + || (Boolean.FALSE.equals(failed) && detectorState.equals(DetectorStateString.Failed.name()))) { + hitsAsMap.remove(detectorId); + } + + } + } + } + sb.append("AnomalyDetectors=["); - for (SearchHit hit : hits) { + for (SearchHit hit : hitsAsMap.values()) { sb.append("{"); sb.append("id=").append(hit.getId()).append(","); sb.append("name=").append(hit.getSourceAsMap().get("name")); sb.append("}"); } sb.append("]"); - sb.append("TotalAnomalyDetectors=").append(response.getHits().getTotalHits().value); + sb.append("TotalAnomalyDetectors=").append(hitsAsMap.size()); listener.onResponse((T) sb.toString()); }, e -> { log.error("Failed to search anomaly detectors.", e); diff --git a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java new file mode 100644 index 00000000..a14a43b7 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java @@ -0,0 +1,19 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: 
Apache-2.0 + */ + +package org.opensearch.agent.tools.utils; + +public class ToolConstants { + // Detector state is not cleanly defined on the backend plugin. So, we persist a standard + // set of states here for users to interface with when fetching and filtering detectors. + // This follows what frontend AD users are familiar with, as we use the same parsing logic + // in SearchAnomalyDetectorsTool. + public static enum DetectorStateString { + Running, + Disabled, + Failed, + Initializing + } +} diff --git a/src/test/java/org/opensearch/agent/TestHelpers.java b/src/test/java/org/opensearch/agent/TestHelpers.java new file mode 100644 index 00000000..847bc151 --- /dev/null +++ b/src/test/java/org/opensearch/agent/TestHelpers.java @@ -0,0 +1,64 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent; + +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; + +import org.apache.lucene.search.TotalHits; +import org.mockito.Mockito; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchResponseSections; +import org.opensearch.ad.model.ADTask; +import org.opensearch.ad.model.AnomalyDetector; +import org.opensearch.ad.transport.GetAnomalyDetectorResponse; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.aggregations.Aggregations; + +public class TestHelpers { + + public static SearchResponse generateSearchResponse(SearchHit[] hits) { + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + return new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), 
+ null, + 0, + 0, + 0, + 0, + null, + null + ); + } + + public static GetAnomalyDetectorResponse generateGetAnomalyDetectorResponses(String[] detectorIds, String[] detectorStates) { + AnomalyDetector detector = Mockito.mock(AnomalyDetector.class); + // For each subsequent call to getDetectorId(), return the next detectorId in the array + when(detector.getDetectorId()).thenReturn(detectorIds[0], Arrays.copyOfRange(detectorIds, 1, detectorIds.length)); + ADTask realtimeAdTask = Mockito.mock(ADTask.class); + // For each subsequent call to getState(), return the next detectorState in the array + when(realtimeAdTask.getState()).thenReturn(detectorStates[0], Arrays.copyOfRange(detectorStates, 1, detectorStates.length)); + GetAnomalyDetectorResponse getDetectorProfileResponse = Mockito.mock(GetAnomalyDetectorResponse.class); + when(getDetectorProfileResponse.getRealtimeAdTask()).thenReturn(realtimeAdTask); + when(getDetectorProfileResponse.getDetector()).thenReturn(detector); + return getDetectorProfileResponse; + } + + public static SearchHit generateSearchDetectorHit(String detectorName, String detectorId) throws IOException { + XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); + content.startObject(); + content.field("name", detectorName); + content.endObject(); + return new SearchHit(0, detectorId, null, null).sourceRef(BytesReference.bytes(content)); + } +} diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index 37ff02a1..3c5b4295 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -7,17 +7,18 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static 
org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; -import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.Locale; import java.util.Map; -import org.apache.lucene.search.TotalHits; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; @@ -26,10 +27,11 @@ import org.mockito.MockitoAnnotations; import org.opensearch.action.ActionType; import org.opensearch.action.search.SearchResponse; -import org.opensearch.action.search.SearchResponseSections; -import org.opensearch.client.AdminClient; -import org.opensearch.client.ClusterAdminClient; -import org.opensearch.client.IndicesAdminClient; +import org.opensearch.ad.transport.GetAnomalyDetectorAction; +import org.opensearch.ad.transport.GetAnomalyDetectorResponse; +import org.opensearch.ad.transport.SearchAnomalyDetectorAction; +import org.opensearch.agent.TestHelpers; +import org.opensearch.agent.tools.utils.ToolConstants.DetectorStateString; import org.opensearch.client.node.NodeClient; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.action.ActionListener; @@ -37,18 +39,10 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.search.SearchHit; -import org.opensearch.search.SearchHits; -import org.opensearch.search.aggregations.Aggregations; public class SearchAnomalyDetectorsToolTests { @Mock private NodeClient nodeClient; - @Mock - private AdminClient adminClient; - @Mock - private IndicesAdminClient indicesAdminClient; - @Mock - private ClusterAdminClient clusterAdminClient; private Map nullParams; private Map emptyParams; @@ -67,22 +61,8 @@ public void setup() { @Test public void testRunWithNoDetectors() throws Exception { Tool tool = 
SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); - - SearchHit[] hits = new SearchHit[0]; - - TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); - - SearchResponse getDetectorsResponse = new SearchResponse( - new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), - null, - 0, - 0, - 0, - 0, - null, - null - ); - String expectedResponseStr = String.format(Locale.getDefault(), "AnomalyDetectors=[]TotalAnomalyDetectors=%d", hits.length); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(new SearchHit[0]); + String expectedResponseStr = String.format(Locale.getDefault(), "AnomalyDetectors=[]TotalAnomalyDetectors=0"); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); @@ -111,19 +91,7 @@ public void testRunWithSingleAnomalyDetector() throws Exception { content.endObject(); SearchHit[] hits = new SearchHit[1]; hits[0] = new SearchHit(0, detectorId, null, null).sourceRef(BytesReference.bytes(content)); - - TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); - - SearchResponse getDetectorsResponse = new SearchResponse( - new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), - null, - 0, - 0, - 0, - 0, - null, - null - ); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); String expectedResponseStr = String .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); @@ -142,6 +110,275 @@ public void testRunWithSingleAnomalyDetector() throws Exception { assertEquals(expectedResponseStr, responseCaptor.getValue()); } + @Test + public void testRunWithRunningDetectorTrue() throws Exception { + final String detectorName = "detector-1"; + final String detectorId = "detector-1-id"; + Tool tool = 
SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[1]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + String expectedResponseStr = String + .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("running", "true"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithRunningDetectorFalse() throws Exception { + final String detectorName = "detector-1"; + final String detectorId = "detector-1-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[1]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + String expectedResponseStr = "AnomalyDetectors=[]TotalAnomalyDetectors=0"; + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + 
mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("running", "false"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithRunningDetectorUndefined() throws Exception { + final String detectorName = "detector-1"; + final String detectorId = "detector-1-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[1]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + String expectedResponseStr = String + .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("foo", "bar"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithNullRealtimeTask() throws Exception { + final String detectorName = "detector-1"; + final String detectorId = "detector-1-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[1]; + hits[0] = 
TestHelpers.generateSearchDetectorHit(detectorName, detectorId); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + // Overriding the mocked response to realtime task and setting to null. This occurs when + // a detector is created but is never started. + when(getDetectorProfileResponse.getRealtimeAdTask()).thenReturn(null); + String expectedResponseStr = String + .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("disabled", "true"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithTaskStateCreated() throws Exception { + final String detectorName = "detector-1"; + final String detectorId = "detector-1-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[1]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + // Overriding the mocked response to set realtime task state to CREATED + when(getDetectorProfileResponse.getRealtimeAdTask().getState()).thenReturn("CREATED"); + String 
expectedResponseStr = String + .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("running", "false"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertEquals(expectedResponseStr, responseCaptor.getValue()); + } + + @Test + public void testRunWithTaskStateVariousFailed() throws Exception { + final String detectorName1 = "detector-1"; + final String detectorId1 = "detector-1-id"; + final String detectorName2 = "detector-2"; + final String detectorId2 = "detector-2-id"; + final String detectorName3 = "detector-3"; + final String detectorId3 = "detector-3-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[3]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName1, detectorId1); + hits[1] = TestHelpers.generateSearchDetectorHit(detectorName2, detectorId2); + hits[2] = TestHelpers.generateSearchDetectorHit(detectorName3, detectorId3); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses( + new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { "INIT_FAILURE", "UNEXPECTED_FAILURE", "FAILED" } + ); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("failed", "true"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, 
times(1)).onResponse(responseCaptor.capture()); + assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=3")); + } + + @Test + public void testRunWithCombinedDetectorStatesTrue() throws Exception { + final String detectorName1 = "detector-1"; + final String detectorId1 = "detector-1-id"; + final String detectorName2 = "detector-2"; + final String detectorId2 = "detector-2-id"; + final String detectorName3 = "detector-3"; + final String detectorId3 = "detector-3-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[3]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName1, detectorId1); + hits[1] = TestHelpers.generateSearchDetectorHit(detectorName2, detectorId2); + hits[2] = TestHelpers.generateSearchDetectorHit(detectorName3, detectorId3); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses( + new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { DetectorStateString.Running.name(), DetectorStateString.Disabled.name(), DetectorStateString.Failed.name() } + ); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("running", "true", "disabled", "true", "failed", "true"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=3")); + } + + @Test + public void testRunWithCombinedDetectorStatesFalse() throws Exception { + final String detectorName1 = "detector-1"; + final String detectorId1 = "detector-1-id"; + final String detectorName2 = "detector-2"; + final String 
detectorId2 = "detector-2-id"; + final String detectorName3 = "detector-3"; + final String detectorId3 = "detector-3-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[3]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName1, detectorId1); + hits[1] = TestHelpers.generateSearchDetectorHit(detectorName2, detectorId2); + hits[2] = TestHelpers.generateSearchDetectorHit(detectorName3, detectorId3); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses( + new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { DetectorStateString.Running.name(), DetectorStateString.Disabled.name(), DetectorStateString.Failed.name() } + ); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("running", "false", "disabled", "false", "failed", "false"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=0")); + } + + @Test + public void testRunWithCombinedDetectorStatesMixed() throws Exception { + final String detectorName1 = "detector-1"; + final String detectorId1 = "detector-1-id"; + final String detectorName2 = "detector-2"; + final String detectorId2 = "detector-2-id"; + final String detectorName3 = "detector-3"; + final String detectorId3 = "detector-3-id"; + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + + // Generate mock values and responses + SearchHit[] hits = new SearchHit[3]; + hits[0] = TestHelpers.generateSearchDetectorHit(detectorName1, 
detectorId1); + hits[1] = TestHelpers.generateSearchDetectorHit(detectorName2, detectorId2); + hits[2] = TestHelpers.generateSearchDetectorHit(detectorName3, detectorId3); + SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); + GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers + .generateGetAnomalyDetectorResponses( + new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { DetectorStateString.Running.name(), DetectorStateString.Disabled.name(), DetectorStateString.Failed.name() } + ); + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); + + tool.run(Map.of("running", "true", "disabled", "false", "failed", "true"), listener); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(listener, times(1)).onResponse(responseCaptor.capture()); + assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=2")); + } + + @Test + public void testParseParams() throws Exception { + Tool tool = SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); + Map validParams = new HashMap(); + validParams.put("detectorName", "foo"); + validParams.put("indices", "foo"); + validParams.put("highCardinality", "false"); + validParams.put("lastUpdateTime", "1234"); + validParams.put("sortOrder", "foo"); + validParams.put("size", "10"); + validParams.put("startIndex", "0"); + validParams.put("running", "false"); + validParams.put("disabled", "false"); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + assertDoesNotThrow(() -> tool.run(validParams, listener)); + assertDoesNotThrow(() -> tool.run(Map.of("detectorNamePattern", "foo*"), listener)); + assertDoesNotThrow(() -> tool.run(Map.of("sortOrder", "AsC"), listener)); + } + @Test public void testValidate() { Tool tool = 
SearchAnomalyDetectorsTool.Factory.getInstance().create(Collections.emptyMap()); @@ -150,4 +387,20 @@ public void testValidate() { assertTrue(tool.validate(nonEmptyParams)); assertTrue(tool.validate(nullParams)); } + + private void mockProfileApiCalls(SearchResponse getDetectorsResponse, GetAnomalyDetectorResponse getDetectorProfileResponse) { + // Mock return from initial search call + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getDetectorsResponse); + return null; + }).when(nodeClient).execute(any(SearchAnomalyDetectorAction.class), any(), any()); + + // Mock return from secondary detector profile call + doAnswer((invocation) -> { + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(getDetectorProfileResponse); + return null; + }).when(nodeClient).execute(any(GetAnomalyDetectorAction.class), any(), any()); + } } From 156f54c16788870c8dcae1422f4b5fc157eafba3 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:48:46 +0800 Subject: [PATCH 021/119] provide PPL for querying system indices (#98) (#99) * provide PPL for querying system indices * check error message * provide all indices starting from '.' 
--------- (cherry picked from commit ce6b6d9da895f70d8723aa07158961536fe9d5d6) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/PPLTool.java | 8 ++++++++ .../opensearch/agent/tools/PPLToolTests.java | 18 ++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 6cedd08d..fd383641 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -5,6 +5,8 @@ package org.opensearch.agent.tools; +import static org.opensearch.ml.common.CommonValue.*; + import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; @@ -96,6 +98,12 @@ public void run(Map parameters, ActionListener listener) if (StringUtils.isBlank(indexName) || StringUtils.isBlank(question)) { throw new IllegalArgumentException("Parameter index and question can not be null or empty."); } + if (indexName.startsWith(".")) { + throw new IllegalArgumentException( + "PPLTool doesn't support searching indices starting with '.' 
since it could be system index, current searching index name: " + + indexName + ); + } SearchRequest searchRequest = buildSearchRequest(indexName); GetMappingsRequest getMappingsRequest = buildGetMappingRequest(indexName); client.admin().indices().getMappings(getMappingsRequest, ActionListener.wrap(getMappingsResponse -> { diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index 54535841..e3d725e1 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -10,6 +10,7 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.when; +import static org.opensearch.ml.common.CommonValue.ML_CONNECTOR_INDEX; import static org.opensearch.ml.common.utils.StringUtils.gson; import java.util.Collections; @@ -155,6 +156,23 @@ public void testTool_withPPLTag() { } + @Test + public void testTool_querySystemIndex() { + Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> tool.run(ImmutableMap.of("index", ML_CONNECTOR_INDEX, "question", "demo"), ActionListener.wrap(ppl -> { + assertEquals(pplResult, "ppl result"); + }, e -> { assertEquals("We cannot search system indices " + ML_CONNECTOR_INDEX, e.getMessage()); })) + ); + assertEquals( + "PPLTool doesn't support searching indices starting with '.' 
since it could be system index, current searching index name: " + + ML_CONNECTOR_INDEX, + exception.getMessage() + ); + } + @Test public void testTool_getMappingFailure() { Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); From d69b45a8cd84053ba9a9a7d50c937278ed5b58f1 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 16:19:23 +0800 Subject: [PATCH 022/119] ignore alias field (#101) (#103) * ignore alias field * add default value for safety --------- (cherry picked from commit 57cba900b97207494e123bd801a35a71256e95c6) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index fd383641..d3471fdf 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -283,7 +283,9 @@ private void extractNamesTypes(Map mappingSource, Map vMap = (Map) v; if (vMap.containsKey("type")) { - fieldsToType.put(prefix + n, (String) vMap.get("type")); + if (!((vMap.getOrDefault("type", "")).equals("alias"))) { + fieldsToType.put(prefix + n, (String) vMap.get("type")); + } } else if (vMap.containsKey("properties")) { extractNamesTypes((Map) vMap.get("properties"), fieldsToType, prefix + n); } From 39e43612e761439297ad0c89cfb94c830ea5b2d0 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 11:31:43 +0800 Subject: [PATCH 023/119] Mock http server for LLM; Integration test for visualization tool (#92) (#102) * mock server and integTest for visualization * update rest status * add refresh * merge 
from main * rename variable name --------- (cherry picked from commit 4c76e4c26f810f9c31775517122dbc99ff6a1a30) Signed-off-by: Hailong Cui Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 1 + .../agent/tools/VisualizationsTool.java | 5 +- .../opensearch/integTest/MockHttpServer.java | 52 +++++ .../opensearch/integTest/PromptHandler.java | 61 +++++ .../integTest/ToolIntegrationTest.java | 219 ++++++++++++++++++ .../integTest/VisualizationsToolIT.java | 107 +++++++++ 6 files changed, 441 insertions(+), 4 deletions(-) create mode 100644 src/test/java/org/opensearch/integTest/MockHttpServer.java create mode 100644 src/test/java/org/opensearch/integTest/PromptHandler.java create mode 100644 src/test/java/org/opensearch/integTest/ToolIntegrationTest.java create mode 100644 src/test/java/org/opensearch/integTest/VisualizationsToolIT.java diff --git a/build.gradle b/build.gradle index d4eef32d..45b3f314 100644 --- a/build.gradle +++ b/build.gradle @@ -133,6 +133,7 @@ dependencies { // Test dependencies testImplementation "org.opensearch.test:framework:${opensearch_version}" testImplementation group: 'junit', name: 'junit', version: '4.13.2' + testImplementation group: 'org.json', name: 'json', version: '20231013' testImplementation group: 'org.mockito', name: 'mockito-core', version: '5.8.0' testImplementation group: 'org.mockito', name: 'mockito-inline', version: '5.2.0' testImplementation("net.bytebuddy:byte-buddy:1.14.7") diff --git a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java index 31f5cf09..958f1eda 100644 --- a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java +++ b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java @@ -16,6 +16,7 @@ import org.opensearch.client.Client; import org.opensearch.client.Requests; import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.Strings; import 
org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -24,9 +25,6 @@ import org.opensearch.search.SearchHits; import org.opensearch.search.builder.SearchSourceBuilder; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Strings; - import lombok.Builder; import lombok.Getter; import lombok.Setter; @@ -113,7 +111,6 @@ public void onFailure(Exception e) { }); } - @VisibleForTesting String trimIdPrefix(String id) { id = Optional.ofNullable(id).orElse(""); if (id.startsWith(SAVED_OBJECT_TYPE)) { diff --git a/src/test/java/org/opensearch/integTest/MockHttpServer.java b/src/test/java/org/opensearch/integTest/MockHttpServer.java new file mode 100644 index 00000000..f64adcd1 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/MockHttpServer.java @@ -0,0 +1,52 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.io.IOException; +import java.io.InputStream; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; + +import com.google.gson.Gson; +import com.sun.net.httpserver.HttpServer; + +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public class MockHttpServer { + + private static Gson gson = new Gson(); + + public static HttpServer setupMockLLM(List promptHandlers) throws IOException { + HttpServer server = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + + server.createContext("/invoke", exchange -> { + InputStream ins = exchange.getRequestBody(); + String req = new String(ins.readAllBytes(), StandardCharsets.UTF_8); + Map map = gson.fromJson(req, Map.class); + String prompt = map.get("prompt"); + log.debug("prompt received: {}", prompt); + + String llmRes = ""; + for (PromptHandler promptHandler : promptHandlers) { 
+ if (promptHandler.apply(prompt)) { + PromptHandler.LLMResponse llmResponse = new PromptHandler.LLMResponse(); + llmResponse.setCompletion(promptHandler.response(prompt)); + llmRes = gson.toJson(llmResponse); + break; + } + } + byte[] llmResBytes = llmRes.getBytes(StandardCharsets.UTF_8); + exchange.sendResponseHeaders(200, llmResBytes.length); + exchange.getResponseBody().write(llmResBytes); + exchange.close(); + }); + return server; + } +} diff --git a/src/test/java/org/opensearch/integTest/PromptHandler.java b/src/test/java/org/opensearch/integTest/PromptHandler.java new file mode 100644 index 00000000..a3f9314d --- /dev/null +++ b/src/test/java/org/opensearch/integTest/PromptHandler.java @@ -0,0 +1,61 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import com.google.gson.annotations.SerializedName; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +public class PromptHandler { + + boolean apply(String prompt) { + return prompt.contains(llmThought().getQuestion()); + } + + LLMThought llmThought() { + return new LLMThought(); + } + + String response(String prompt) { + if (prompt.contains("TOOL RESPONSE: ")) { + return "```json{\n" + + " \"thought\": \"Thought: Now I know the final answer\",\n" + + " \"final_answer\": \"final answer\"\n" + + "}```"; + } else { + return "```json{\n" + + " \"thought\": \"Thought: Let me use tool to figure out\",\n" + + " \"action\": \"" + + this.llmThought().getAction() + + "\",\n" + + " \"action_input\": \"" + + this.llmThought().getActionInput() + + "\"\n" + + "}```"; + } + } + + @Builder + @NoArgsConstructor + @AllArgsConstructor + @Data + static class LLMThought { + String question; + String action; + String actionInput; + } + + @Data + static class LLMResponse { + String completion; + @SerializedName("stop_reason") + String stopReason = "stop_sequence"; + String stop = 
"\\n\\nHuman:"; + } +} diff --git a/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java b/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java new file mode 100644 index 00000000..aba39573 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java @@ -0,0 +1,219 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.Locale; +import java.util.UUID; +import java.util.concurrent.TimeUnit; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.client.Request; +import org.opensearch.client.RequestOptions; +import org.opensearch.client.Response; + +import com.google.gson.Gson; +import com.google.gson.JsonParser; +import com.sun.net.httpserver.HttpServer; + +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public abstract class ToolIntegrationTest extends BaseAgentToolsIT { + protected HttpServer server; + protected String modelId; + protected String agentId; + protected String modelGroupId; + protected String connectorId; + + private final Gson gson = new Gson(); + + abstract List promptHandlers(); + + abstract String toolType(); + + @Before + public void setupTestAgent() throws IOException, InterruptedException { + server = MockHttpServer.setupMockLLM(promptHandlers()); + server.start(); + clusterSettings(false); + try { + connectorId = setUpConnector(); + } catch (Exception e) { + // Wait for ML encryption master key has been initialized + TimeUnit.SECONDS.sleep(10); + connectorId = setUpConnector(); + } + modelGroupId = setupModelGroup(); + modelId = setupLLMModel(connectorId, modelGroupId); + // wait for model to get deployed + TimeUnit.SECONDS.sleep(1); + agentId = setupConversationalAgent(modelId); + log.info("model_id: {}, agent_id: {}", modelId, agentId); + } + + @After + public void cleanUpClusterSetting() throws IOException 
{ + clusterSettings(true); + } + + @After + public void stopMockLLM() { + server.stop(1); + } + + private String setUpConnector() { + String url = String.format(Locale.ROOT, "http://127.0.0.1:%d/invoke", server.getAddress().getPort()); + return createConnector( + "{\n" + + " \"name\": \"BedRock test claude Connector\",\n" + + " \"description\": \"The connector to BedRock service for claude model\",\n" + + " \"version\": 1,\n" + + " \"protocol\": \"aws_sigv4\",\n" + + " \"parameters\": {\n" + + " \"region\": \"us-east-1\",\n" + + " \"service_name\": \"bedrock\",\n" + + " \"anthropic_version\": \"bedrock-2023-05-31\",\n" + + " \"endpoint\": \"bedrock.us-east-1.amazonaws.com\",\n" + + " \"auth\": \"Sig_V4\",\n" + + " \"content_type\": \"application/json\",\n" + + " \"max_tokens_to_sample\": 8000,\n" + + " \"temperature\": 0.0001,\n" + + " \"response_filter\": \"$.completion\"\n" + + " },\n" + + " \"credential\": {\n" + + " \"access_key\": \"\",\n" + + " \"secret_key\": \"\"\n" + + " },\n" + + " \"actions\": [\n" + + " {\n" + + " \"action_type\": \"predict\",\n" + + " \"method\": \"POST\",\n" + + " \"url\": \"" + + url + + "\",\n" + + " \"headers\": {\n" + + " \"content-type\": \"application/json\",\n" + + " \"x-amz-content-sha256\": \"required\"\n" + + " },\n" + + " \"request_body\": \"{\\\"prompt\\\":\\\"${parameters.prompt}\\\", \\\"max_tokens_to_sample\\\":${parameters.max_tokens_to_sample}, \\\"temperature\\\":${parameters.temperature}, \\\"anthropic_version\\\":\\\"${parameters.anthropic_version}\\\" }\"\n" + + " }\n" + + " ]\n" + + "}" + ); + } + + private void clusterSettings(boolean clean) throws IOException { + if (!clean) { + updateClusterSettings("plugins.ml_commons.only_run_on_ml_node", false); + updateClusterSettings("plugins.ml_commons.memory_feature_enabled", true); + updateClusterSettings("plugins.ml_commons.trusted_connector_endpoints_regex", List.of("^.*$")); + } else { + updateClusterSettings("plugins.ml_commons.only_run_on_ml_node", null); + 
updateClusterSettings("plugins.ml_commons.memory_feature_enabled", null); + updateClusterSettings("plugins.ml_commons.trusted_connector_endpoints_regex", null); + } + } + + private String setupModelGroup() throws IOException { + Request request = new Request("POST", "/_plugins/_ml/model_groups/_register"); + request + .setJsonEntity( + "{\n" + + " \"name\": \"test_model_group_bedrock-" + + UUID.randomUUID() + + "\",\n" + + " \"description\": \"This is a public model group\"\n" + + "}" + ); + Response response = executeRequest(request); + + String resp = readResponse(response); + + return JsonParser.parseString(resp).getAsJsonObject().get("model_group_id").getAsString(); + } + + private String setupLLMModel(String connectorId, String modelGroupId) throws IOException { + Request request = new Request("POST", "/_plugins/_ml/models/_register?deploy=true"); + request + .setJsonEntity( + "{\n" + + " \"name\": \"Bedrock Claude V2 model\",\n" + + " \"function_name\": \"remote\",\n" + + " \"model_group_id\": \"" + + modelGroupId + + "\",\n" + + " \"description\": \"test model\",\n" + + " \"connector_id\": \"" + + connectorId + + "\"\n" + + "}" + ); + Response response = executeRequest(request); + + String resp = readResponse(response); + + return JsonParser.parseString(resp).getAsJsonObject().get("model_id").getAsString(); + } + + private String setupConversationalAgent(String modelId) throws IOException { + Request request = new Request("POST", "/_plugins/_ml/agents/_register"); + request + .setJsonEntity( + "{\n" + + " \"name\": \"integTest-agent\",\n" + + " \"type\": \"conversational\",\n" + + " \"description\": \"this is a test agent\",\n" + + " \"llm\": {\n" + + " \"model_id\": \"" + + modelId + + "\",\n" + + " \"parameters\": {\n" + + " \"max_iteration\": \"5\",\n" + + " \"stop_when_no_tool_found\": \"true\",\n" + + " \"response_filter\": \"$.completion\"\n" + + " }\n" + + " },\n" + + " \"tools\": [\n" + + " {\n" + + " \"type\": \"" + + toolType() + + "\",\n" + + " 
\"name\": \"" + + toolType() + + "\",\n" + + " \"include_output_in_agent_response\": true,\n" + + " \"description\": \"tool description\"\n" + + " }\n" + + " ],\n" + + " \"memory\": {\n" + + " \"type\": \"conversation_index\"\n" + + " }\n" + + "}" + ); + Response response = executeRequest(request); + + String resp = readResponse(response); + + return JsonParser.parseString(resp).getAsJsonObject().get("agent_id").getAsString(); + } + + public static Response executeRequest(Request request) throws IOException { + RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); + builder.addHeader("Content-Type", "application/json"); + request.setOptions(builder); + return client().performRequest(request); + } + + public static String readResponse(Response response) throws IOException { + try (InputStream ins = response.getEntity().getContent()) { + return String.join("", org.opensearch.common.io.Streams.readAllLines(ins)); + } + } +} diff --git a/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java b/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java new file mode 100644 index 00000000..e7f54521 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java @@ -0,0 +1,107 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.UUID; + +import org.junit.Assert; +import org.opensearch.agent.tools.VisualizationsTool; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.core.rest.RestStatus; + +import com.google.gson.JsonParser; + +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public class VisualizationsToolIT extends ToolIntegrationTest { + @Override + List promptHandlers() { + return List.of(new PromptHandler() { + @Override + LLMThought llmThought() { + return LLMThought + .builder() + 
.action(VisualizationsTool.TYPE) + .actionInput("RAM") + .question("can you show me RAM info with visualization?") + .build(); + } + }, new PromptHandler() { + @Override + LLMThought llmThought() { + return LLMThought + .builder() + .action(VisualizationsTool.TYPE) + .actionInput("sales") + .question("how about the sales about this month?") + .build(); + } + }); + } + + String toolType() { + return VisualizationsTool.TYPE; + } + + public void testVisualizationNotFound() throws IOException { + Request request = new Request("POST", "/_plugins/_ml/agents/" + agentId + "/_execute"); + request.setJsonEntity("{\"parameters\":{\"question\":\"can you show me RAM info with visualization?\"}}"); + Response response = executeRequest(request); + String responseStr = readResponse(response); + String toolOutput = extractAdditionalInfo(responseStr); + Assert.assertEquals("No Visualization found", toolOutput); + } + + public void testVisualizationFound() throws IOException { + String title = "[eCommerce] Sales by Category"; + String id = UUID.randomUUID().toString(); + prepareVisualization(title, id); + Request request = new Request("POST", "/_plugins/_ml/agents/" + agentId + "/_execute"); + request.setJsonEntity("{\"parameters\":{\"question\":\"how about the sales about this month?\"}}"); + Response response = executeRequest(request); + String responseStr = readResponse(response); + String toolOutput = extractAdditionalInfo(responseStr); + Assert.assertEquals("Title,Id\n" + String.format(Locale.ROOT, "%s,%s\n", title, id), toolOutput); + } + + private void prepareVisualization(String title, String id) { + String body = "{\n" + + " \"visualization\": {\n" + + " \"title\": \"" + + title + + "\"\n" + + " },\n" + + " \"type\": \"visualization\"\n" + + "}"; + Response response = makeRequest(client(), "POST", String.format(Locale.ROOT, ".kibana/_doc/%s?refresh=true", id), null, body, null); + Assert.assertEquals(response.getStatusLine().getStatusCode(), RestStatus.CREATED.getStatus()); 
+ } + + private String extractAdditionalInfo(String responseStr) { + return JsonParser + .parseString(responseStr) + .getAsJsonObject() + .get("inference_results") + .getAsJsonArray() + .get(0) + .getAsJsonObject() + .get("output") + .getAsJsonArray() + .get(0) + .getAsJsonObject() + .get("dataAsMap") + .getAsJsonObject() + .get("additional_info") + .getAsJsonObject() + .get(String.format(Locale.ROOT, "%s.output", toolType())) + .getAsString(); + } +} From 0fc9f191edecf28b6dfe10e3be4957c3e3d607df Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 11:40:17 +0800 Subject: [PATCH 024/119] copy zip to output folder (#89) (#105) (cherry picked from commit 4573cc346e4a430cffc81aaa309ce2170ae8ec4a) Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- scripts/build.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/scripts/build.sh b/scripts/build.sh index 3b20300e..25e5db6c 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -69,3 +69,8 @@ fi ./gradlew publishPluginZipPublicationToZipStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER mkdir -p $OUTPUT/maven/org/opensearch cp -r ./build/local-staging-repo/org/opensearch/. $OUTPUT/maven/org/opensearch + +mkdir -p $OUTPUT/plugins +zipPath=$(find . 
-path \*build/distributions/*.zip) +distributions="$(dirname "${zipPath}")" +cp ${distributions}/*.zip ./$OUTPUT/plugins From f5e15ac3a2d45172f1222c1f51b787299c0edf3d Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 5 Jan 2024 09:21:59 +0800 Subject: [PATCH 025/119] Add ppl tool it (#104) (#106) * add PPL IT * add IT for ppl tool * apply spotless * remove uesless full name * apply spotless --------- (cherry picked from commit b706e1e6ee32a7352e43c73560422bf8ccc82988) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/integTest/PPLToolIT.java | 178 ++++++++++++++++++ ...r_flow_agent_of_ppl_tool_request_body.json | 13 ++ 2 files changed, 191 insertions(+) create mode 100644 src/test/java/org/opensearch/integTest/PPLToolIT.java create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ppl_tool_request_body.json diff --git a/src/test/java/org/opensearch/integTest/PPLToolIT.java b/src/test/java/org/opensearch/integTest/PPLToolIT.java new file mode 100644 index 00000000..837a8969 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/PPLToolIT.java @@ -0,0 +1,178 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; + +import org.hamcrest.MatcherAssert; +import org.opensearch.agent.tools.PPLTool; +import org.opensearch.client.ResponseException; + +import lombok.SneakyThrows; +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public class PPLToolIT extends ToolIntegrationTest { + + private String TEST_INDEX_NAME = "employee"; + + @Override + List promptHandlers() { + PromptHandler PPLHandler = new PromptHandler() { + @Override + String 
response(String prompt) { + if (prompt.contains("correct")) { + return "source=employee | where age > 56 | stats COUNT() as cnt"; + } else { + return "source=employee | asd"; + } + } + + @Override + boolean apply(String prompt) { + return true; + } + }; + return List.of(PPLHandler); + } + + @Override + String toolType() { + return PPLTool.TYPE; + } + + @SneakyThrows + public void testPPLTool() { + prepareIndex(); + String agentId = registerAgent(); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"correct\", \"index\": \"employee\"}}"); + assertEquals( + "{\"ppl\":\"source\\u003demployee| where age \\u003e 56 | stats COUNT() as cnt\",\"executionResult\":\"{\\n \\\"schema\\\": [\\n {\\n \\\"name\\\": \\\"cnt\\\",\\n \\\"type\\\": \\\"integer\\\"\\n }\\n ],\\n \\\"datarows\\\": [\\n [\\n 0\\n ]\\n ],\\n \\\"total\\\": 1,\\n \\\"size\\\": 1\\n}\"}", + result + ); + } + + public void testPPLTool_withWrongPPLGenerated_thenThrowException() { + prepareIndex(); + String agentId = registerAgent(); + Exception exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"wrong\", \"index\": \"employee\"}}") + ); + MatcherAssert.assertThat(exception.getMessage(), allOf(containsString("execute ppl:source=employee| asd, get error"))); + + } + + public void testPPLTool_withWrongModelId_thenThrowException() { + prepareIndex(); + String agentId = registerAgentWithWrongModelId(); + Exception exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"correct\", \"index\": \"employee\"}}") + ); + MatcherAssert.assertThat(exception.getMessage(), allOf(containsString("Failed to find model"))); + + } + + public void testPPLTool_withSystemQuery_thenThrowException() { + prepareIndex(); + String agentId = registerAgent(); + Exception exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"question\": 
\"correct\", \"index\": \".employee\"}}") + ); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "PPLTool doesn't support searching indices starting with '.' since it could be system index, current searching index name: .employee" + ) + ) + ); + + } + + public void testPPLTool_withNonExistingIndex_thenThrowException() { + prepareIndex(); + String agentId = registerAgent(); + Exception exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"correct\", \"index\": \"employee2\"}}") + ); + MatcherAssert.assertThat(exception.getMessage(), allOf(containsString("no such index [employee2]"))); + } + + public void testPPLTool_withBlankInput_thenThrowException() { + prepareIndex(); + String agentId = registerAgent(); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + MatcherAssert.assertThat(exception.getMessage(), allOf(containsString("Parameter index and question can not be null or empty."))); + } + + @SneakyThrows + private String registerAgent() { + String registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_ppl_tool_request_body.json") + .toURI() + ) + ); + registerAgentRequestBody = registerAgentRequestBody.replace("", modelId); + String agentId = createAgent(registerAgentRequestBody); + return agentId; + } + + @SneakyThrows + private String registerAgentWithWrongModelId() { + String registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_ppl_tool_request_body.json") + .toURI() + ) + ); + registerAgentRequestBody = registerAgentRequestBody.replace("", "wrong_model_id"); + String agentId = createAgent(registerAgentRequestBody); + return agentId; + } + + 
@SneakyThrows + private void prepareIndex() { + createIndexWithConfiguration( + TEST_INDEX_NAME, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"age\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(TEST_INDEX_NAME, "0", List.of("age", "name"), List.of(56, "john")); + addDocToIndex(TEST_INDEX_NAME, "1", List.of("age", "name"), List.of(56, "smith")); + } + +} diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ppl_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ppl_tool_request_body.json new file mode 100644 index 00000000..0e1a167e --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ppl_tool_request_body.json @@ -0,0 +1,13 @@ +{ + "name": "Test_PPL_Agent_For_RAG", + "type": "flow", + "tools": [ + { + "type": "PPLTool", + "parameters": { + "model_id": "", + "prompt": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. 
Now I have a question: ${indexInfo.question} Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.mappingInfo}\n\n### Fields:\n${indexInfo.indexName}\n\n### Response:\n" + } + } + ] +} \ No newline at end of file From c73fc7ac198ab3693468d405158951f686f5fc12 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 5 Jan 2024 13:24:34 +0800 Subject: [PATCH 026/119] fix: update search index tool parse logic (#100) (#107) * update search index tool parse logic * Update src/main/java/org/opensearch/agent/tools/SearchIndexTool.java * update parsing logic and unit tests * Update src/main/java/org/opensearch/agent/tools/SearchIndexTool.java * import classes in search index tool --------- (cherry picked from commit da3199246b7eda3d5ea5e4b5a428d79b52c14407) Signed-off-by: yuye-aws Signed-off-by: Yuye Zhu Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: zane-neo --- .../agent/tools/SearchIndexTool.java | 26 +++++++------------ .../agent/tools/SearchIndexToolTests.java | 21 ++++----------- 2 files changed, 14 insertions(+), 33 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java index ff34718e..c7577501 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java @@ -12,6 +12,7 @@ import java.security.PrivilegedExceptionAction; import java.util.Map; import java.util.Objects; +import java.util.Optional; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; @@ -30,6 +31,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; +import com.google.gson.JsonElement; import com.google.gson.JsonObject; import lombok.Getter; @@ -48,7 +50,7 @@ public class SearchIndexTool implements 
Tool { public static final String TYPE = "SearchIndexTool"; private static final String DEFAULT_DESCRIPTION = - "Use this tool to search an index by providing two parameters: 'index' for the index name, and 'query' for the OpenSearch DSL formatted query."; + "Use this tool to search an index by providing two parameters: 'index' for the index name, and 'query' for the OpenSearch DSL formatted query. Only use this tool when a DSL query is available."; private String name = TYPE; @@ -90,23 +92,13 @@ public void run(Map parameters, ActionListener listener) try { String input = parameters.get(INPUT_FIELD); JsonObject jsonObject = StringUtils.gson.fromJson(input, JsonObject.class); - String index = jsonObject.get(INDEX_FIELD).getAsString(); - String query = jsonObject.get(QUERY_FIELD).toString(); - - SearchRequest searchRequest; - try { - searchRequest = getSearchRequest(index, query); - } catch (Exception e1) { - try { - // try different json parsing method - query = jsonObject.get(QUERY_FIELD).getAsString(); - searchRequest = getSearchRequest(index, query); - } catch (Exception e2) { - // try wrapped query - query = "{\"query\": " + query + "}"; - searchRequest = getSearchRequest(index, query); - } + String index = Optional.ofNullable(jsonObject).map(x -> x.get(INDEX_FIELD)).map(JsonElement::getAsString).orElse(null); + String query = Optional.ofNullable(jsonObject).map(x -> x.get(QUERY_FIELD)).map(JsonElement::toString).orElse(null); + if (index == null || query == null) { + listener.onFailure(new IllegalArgumentException("SearchIndexTool's two parameter: index and query are required!")); + return; } + SearchRequest searchRequest = getSearchRequest(index, query); ActionListener actionListener = ActionListener.wrap(r -> { SearchHit[] hits = r.getHits().getHits(); diff --git a/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java index 6de9fbfc..d228c0cb 100644 --- 
a/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java @@ -94,7 +94,7 @@ public void testRunWithNormalIndex() { @Test public void testRunWithConnectorIndex() { - String inputString = "{\"index\": \".plugins-ml-connector\", \"query\": {\"match_all\": {}}}"; + String inputString = "{\"index\": \".plugins-ml-connector\", \"query\": {\"query\": {\"match_all\": {}}}}"; Map parameters = Map.of("input", inputString); mockedSearchIndexTool.run(parameters, null); Mockito.verify(client, never()).search(any(), any()); @@ -103,7 +103,7 @@ public void testRunWithConnectorIndex() { @Test public void testRunWithModelIndex() { - String inputString = "{\"index\": \".plugins-ml-model\", \"query\": {\"match_all\": {}}}"; + String inputString = "{\"index\": \".plugins-ml-model\", \"query\": {\"query\": {\"match_all\": {}}}}"; Map parameters = Map.of("input", inputString); mockedSearchIndexTool.run(parameters, null); Mockito.verify(client, never()).search(any(), any()); @@ -112,7 +112,7 @@ public void testRunWithModelIndex() { @Test public void testRunWithModelGroupIndex() { - String inputString = "{\"index\": \".plugins-ml-model-group\", \"query\": {\"match_all\": {}}}"; + String inputString = "{\"index\": \".plugins-ml-model-group\", \"query\": {\"query\": {\"match_all\": {}}}}"; Map parameters = Map.of("input", inputString); mockedSearchIndexTool.run(parameters, null); Mockito.verify(client, never()).search(any(), any()); @@ -133,7 +133,7 @@ public void testRunWithSearchResults() { return null; }).when(client).search(any(), any()); - String inputString = "{\"index\": \"test-index\", \"query\": {\"match_all\": {}}}"; + String inputString = "{\"index\": \"test-index\", \"query\": {\"query\": {\"match_all\": {}}}}"; final CompletableFuture future = new CompletableFuture<>(); ActionListener listener = ActionListener.wrap(r -> { future.complete(r); }, e -> { future.completeExceptionally(e); }); Map 
parameters = Map.of("input", inputString); @@ -168,24 +168,13 @@ public void testRunWithInvalidQuery() { @Test public void testRunWithEmptyQueryBody() { - // this empty query should be parsed with jsonObject.get(QUERY_FIELD).getAsString(); - String inputString = "{\"index\": \"test-index\", \"query\": \"{}\"}"; + String inputString = "{\"index\": \"test-index\", \"query\": {}}"; Map parameters = Map.of("input", inputString); mockedSearchIndexTool.run(parameters, null); Mockito.verify(client, times(1)).search(any(), any()); Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); } - @Test - public void testRunWithWrappedQuery() { - // this query should be wrapped liked "{\"query\": " + query + "}" - String inputString = "{\"index\": \".plugins-ml-model\", \"query\": {\"match_all\": {}}}"; - Map parameters = Map.of("input", inputString); - mockedSearchIndexTool.run(parameters, null); - Mockito.verify(client, never()).search(any(), any()); - Mockito.verify(client, times(1)).execute(eq(MLModelSearchAction.INSTANCE), any(), any()); - } - @Test public void testFactory() { SearchIndexTool searchIndexTool = SearchIndexTool.Factory.getInstance().create(Collections.emptyMap()); From 943650e451d1ab6c775fda9847bcbe274915132c Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 5 Jan 2024 22:57:54 +0800 Subject: [PATCH 027/119] test: integration test for search index tool (#108) (#109) * integration test for search index tool * import MatcherAssert to avoid fully qualified name --------- (cherry picked from commit 815b6666a95b33d3d6f582a2ef1cb368a36ee347) Signed-off-by: yuye-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../integTest/SearchIndexToolIT.java | 136 ++++++++++++++++++ ...ent_of_search_index_tool_request_body.json | 10 ++ 2 files changed, 146 insertions(+) create mode 100644 src/test/java/org/opensearch/integTest/SearchIndexToolIT.java 
create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json diff --git a/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java b/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java new file mode 100644 index 00000000..8b7c9697 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java @@ -0,0 +1,136 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import static org.hamcrest.Matchers.containsString; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; + +import org.hamcrest.MatcherAssert; +import org.junit.After; +import org.junit.Before; +import org.opensearch.client.ResponseException; + +import lombok.SneakyThrows; + +public class SearchIndexToolIT extends BaseAgentToolsIT { + public static String TEST_INDEX_NAME = "test_index"; + private String registerAgentRequestBody; + + @SneakyThrows + private void prepareIndex() { + createIndexWithConfiguration( + TEST_INDEX_NAME, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"text\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(TEST_INDEX_NAME, "0", List.of("text"), List.of("text doc 1")); + addDocToIndex(TEST_INDEX_NAME, "1", List.of("text"), List.of("text doc 2")); + addDocToIndex(TEST_INDEX_NAME, "2", List.of("text"), List.of("text doc 3")); + } + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + prepareIndex(); + registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json") + .toURI() + ) + ); + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + } + + public void testSearchIndexToolInFlowAgent_withMatchAllQuery() { + String 
agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\n" + + " \"parameters\": {\n" + + " \"input\": {\n" + + " \"index\": \"test_index\",\n" + + " \"query\": {\n" + + " \"query\": {\n" + + " \"match_all\": {}\n" + + " }\n" + + " }\n" + + " } \n" + + " }\n" + + "}\n"; + String result = executeAgent(agentId, agentInput); + assertEquals( + "The search index result not equal with expected.", + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 1\"},\"_id\":\"0\",\"_score\":1.0}\n" + + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 2\"},\"_id\":\"1\",\"_score\":1.0}\n" + + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 3\"},\"_id\":\"2\",\"_score\":1.0}\n", + result + ); + } + + public void testSearchIndexToolInFlowAgent_withEmptyIndexField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\n" + + " \"parameters\": {\n" + + " \"input\": {\n" + + " \"query\": {\n" + + " \"query\": {\n" + + " \"match_all\": {}\n" + + " }\n" + + " }\n" + + " } \n" + + " }\n" + + "}\n"; + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, agentInput)); + MatcherAssert.assertThat(exception.getMessage(), containsString("SearchIndexTool's two parameter: index and query are required!")); + } + + public void testSearchIndexToolInFlowAgent_withEmptyQueryField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\n" + + " \"parameters\": {\n" + + " \"input\": {\n" + + " \"index\": \"test_index\"\n" + + " } \n" + + " }\n" + + "}\n"; + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, agentInput)); + MatcherAssert.assertThat(exception.getMessage(), containsString("SearchIndexTool's two parameter: index and query are required!")); + } + + public void testSearchIndexToolInFlowAgent_withIllegalQueryField_thenThrowException() { + String agentId = 
createAgent(registerAgentRequestBody); + String agentInput = "{\n" + + " \"parameters\": {\n" + + " \"input\": {\n" + + " \"index\": \"test_index\",\n" + + " \"query\": \"Invalid Query\"\n" + + " } \n" + + " }\n" + + "}\n"; + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, agentInput)); + MatcherAssert.assertThat(exception.getMessage(), containsString("parsing_exception")); + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json new file mode 100644 index 00000000..52a67073 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json @@ -0,0 +1,10 @@ +{ + "name": "Test_Search_Index_Agent", + "type": "flow", + "tools": [ + { + "type": "SearchIndexTool", + "description": "Use this tool to search an index by providing two parameters: 'index' for the index name, and 'query' for the OpenSearch DSL formatted query. Only use this tool when a DSL query is available." 
+ } + ] +} \ No newline at end of file From 415c9e27a594c83ed516ff98150bcfc950a6b9df Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 11:09:22 -0800 Subject: [PATCH 028/119] Set default index pattern for search AD results tool (#111) (#112) (cherry picked from commit ecb4033c4abe6e73540efe211d381423df6d3d55) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../agent/tools/SearchAnomalyResultsTool.java | 6 ++++- .../agent/tools/utils/ToolConstants.java | 4 ++++ .../tools/SearchAnomalyResultsToolTests.java | 22 +++++++++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index ef1a44dd..a222e6f2 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -13,6 +13,7 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.client.AnomalyDetectionNodeClient; +import org.opensearch.agent.tools.utils.ToolConstants; import org.opensearch.client.Client; import org.opensearch.core.action.ActionListener; import org.opensearch.index.query.BoolQueryBuilder; @@ -130,7 +131,10 @@ public void run(Map parameters, ActionListener listener) .from(startIndex) .sort(sortString, sortOrder); - SearchRequest searchAnomalyResultsRequest = new SearchRequest().source(searchSourceBuilder); + // In the future we may support custom result indices. For now default to the default AD result system indices. 
+ SearchRequest searchAnomalyResultsRequest = new SearchRequest() + .source(searchSourceBuilder) + .indices(ToolConstants.AD_RESULTS_INDEX_PATTERN); ActionListener searchAnomalyResultsListener = ActionListener.wrap(response -> { StringBuilder sb = new StringBuilder(); diff --git a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java index a14a43b7..7e71504d 100644 --- a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java +++ b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java @@ -16,4 +16,8 @@ public static enum DetectorStateString { Failed, Initializing } + + // System indices constants are not cleanly exposed from the AD plugin, so we persist our + // own constant here. + public static final String AD_RESULTS_INDEX_PATTERN = ".opendistro-anomaly-results*"; } diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java index c9d83de2..035b6406 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java @@ -6,6 +6,7 @@ package org.opensearch.agent.tools; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; @@ -28,8 +29,10 @@ import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import org.opensearch.action.ActionType; +import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchResponseSections; +import org.opensearch.agent.tools.utils.ToolConstants; import org.opensearch.client.AdminClient; import org.opensearch.client.ClusterAdminClient; import 
org.opensearch.client.IndicesAdminClient; @@ -182,6 +185,25 @@ public void testRunWithSingleResult() throws Exception { assertEquals(expectedResponseStr, responseCaptor.getValue()); } + @Test + public void testDefaultIndexPatternIsSet() throws Exception { + Tool tool = SearchAnomalyResultsTool.Factory.getInstance().create(Collections.emptyMap()); + + @SuppressWarnings("unchecked") + ActionListener listener = Mockito.mock(ActionListener.class); + + doAnswer((invocation) -> { + SearchRequest generatedRequest = invocation.getArgument(1); + String[] indices = generatedRequest.indices(); + assertNotNull(indices); + assertEquals(1, indices.length); + assertEquals(ToolConstants.AD_RESULTS_INDEX_PATTERN, indices[0]); + return null; + }).when(nodeClient).execute(any(ActionType.class), any(), any()); + + tool.run(emptyParams, listener); + } + @Test public void testValidate() { Tool tool = SearchAnomalyResultsTool.Factory.getInstance().create(Collections.emptyMap()); From 6656a516295b2132f41ae02345e481a291f32809 Mon Sep 17 00:00:00 2001 From: Tyler Ohlsen Date: Wed, 10 Jan 2024 10:51:38 -0800 Subject: [PATCH 029/119] [Backport 2.x] Various AD tool improvements; add corresponding IT (#117) (#119) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../tools/SearchAnomalyDetectorsTool.java | 40 ++++--- .../agent/tools/SearchAnomalyResultsTool.java | 46 +++++--- .../agent/tools/utils/ToolConstants.java | 1 + .../integTest/BaseAgentToolsIT.java | 39 +++++++ .../SearchAnomalyDetectorsToolIT.java | 95 +++++++++++++++ .../integTest/SearchAnomalyResultsToolIT.java | 109 ++++++++++++++++++ ...rch_anomaly_results_tool_request_body.json | 10 ++ ...of_search_detectors_tool_request_body.json | 10 ++ 8 files changed, 323 insertions(+), 27 deletions(-) create mode 100644 src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java create mode 100644 src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java create 
mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index d71814dc..bfffa8b4 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -7,6 +7,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; @@ -20,10 +21,12 @@ import org.opensearch.ad.model.ADTask; import org.opensearch.ad.transport.GetAnomalyDetectorRequest; import org.opensearch.ad.transport.GetAnomalyDetectorResponse; +import org.opensearch.agent.tools.utils.ToolConstants; import org.opensearch.agent.tools.utils.ToolConstants.DetectorStateString; import org.opensearch.client.Client; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.core.action.ActionListener; +import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.RangeQueryBuilder; @@ -130,7 +133,7 @@ public void run(Map parameters, ActionListener listener) .from(startIndex) .sort(sortString, sortOrder); - SearchRequest searchDetectorRequest = new SearchRequest().source(searchSourceBuilder); + SearchRequest searchDetectorRequest = new SearchRequest().source(searchSourceBuilder).indices(ToolConstants.AD_DETECTORS_INDEX); ActionListener searchDetectorListener = ActionListener.wrap(response -> { StringBuilder sb = new StringBuilder(); @@ -209,19 +212,16 @@ public void run(Map parameters, ActionListener listener) } } - 
sb.append("AnomalyDetectors=["); - for (SearchHit hit : hitsAsMap.values()) { - sb.append("{"); - sb.append("id=").append(hit.getId()).append(","); - sb.append("name=").append(hit.getSourceAsMap().get("name")); - sb.append("}"); - } - sb.append("]"); - sb.append("TotalAnomalyDetectors=").append(hitsAsMap.size()); - listener.onResponse((T) sb.toString()); + processHits(hitsAsMap, listener); }, e -> { - log.error("Failed to search anomaly detectors.", e); - listener.onFailure(e); + // System index isn't initialized by default, so ignore such errors + if (e instanceof IndexNotFoundException) { + processHits(Collections.emptyMap(), listener); + } else { + log.error("Failed to search anomaly detectors.", e); + listener.onFailure(e); + } + }); adClient.searchAnomalyDetectors(searchDetectorRequest, searchDetectorListener); @@ -237,6 +237,20 @@ public String getType() { return TYPE; } + private void processHits(Map hitsAsMap, ActionListener listener) { + StringBuilder sb = new StringBuilder(); + sb.append("AnomalyDetectors=["); + for (SearchHit hit : hitsAsMap.values()) { + sb.append("{"); + sb.append("id=").append(hit.getId()).append(","); + sb.append("name=").append(hit.getSourceAsMap().get("name")); + sb.append("}"); + } + sb.append("]"); + sb.append("TotalAnomalyDetectors=").append(hitsAsMap.size()); + listener.onResponse((T) sb.toString()); + } + /** * Factory for the {@link SearchAnomalyDetectorsTool} */ diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index a222e6f2..06c5303d 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -16,6 +16,7 @@ import org.opensearch.agent.tools.utils.ToolConstants; import org.opensearch.client.Client; import org.opensearch.core.action.ActionListener; +import org.opensearch.index.IndexNotFoundException; import 
org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.ExistsQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -26,12 +27,15 @@ import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.ml.common.spi.tools.ToolAnnotation; import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortOrder; import lombok.Getter; import lombok.Setter; +import lombok.extern.log4j.Log4j2; +@Log4j2 @ToolAnnotation(SearchAnomalyResultsTool.TYPE) public class SearchAnomalyResultsTool implements Tool { public static final String TYPE = "SearchAnomalyResultsTool"; @@ -89,7 +93,7 @@ public void run(Map parameters, ActionListener listener) : null; final String sortOrderStr = parameters.getOrDefault("sortOrder", "asc"); final SortOrder sortOrder = sortOrderStr.equalsIgnoreCase("asc") ? SortOrder.ASC : SortOrder.DESC; - final String sortString = parameters.getOrDefault("sortString", "name.keyword"); + final String sortString = parameters.getOrDefault("sortString", "data_start_time"); final int size = parameters.containsKey("size") ? Integer.parseInt(parameters.get("size")) : 20; final int startIndex = parameters.containsKey("startIndex") ? 
Integer.parseInt(parameters.get("startIndex")) : 0; @@ -137,20 +141,17 @@ public void run(Map parameters, ActionListener listener) .indices(ToolConstants.AD_RESULTS_INDEX_PATTERN); ActionListener searchAnomalyResultsListener = ActionListener.wrap(response -> { - StringBuilder sb = new StringBuilder(); - SearchHit[] hits = response.getHits().getHits(); - sb.append("AnomalyResults=["); - for (SearchHit hit : hits) { - sb.append("{"); - sb.append("detectorId=").append(hit.getSourceAsMap().get("detector_id")).append(","); - sb.append("grade=").append(hit.getSourceAsMap().get("anomaly_grade")).append(","); - sb.append("confidence=").append(hit.getSourceAsMap().get("confidence")); - sb.append("}"); + processHits(response.getHits(), listener); + }, e -> { + // System index isn't initialized by default, so ignore such errors + if (e instanceof IndexNotFoundException) { + processHits(SearchHits.empty(), listener); + } else { + log.error("Failed to search anomaly results.", e); + listener.onFailure(e); + } - sb.append("]"); - sb.append("TotalAnomalyResults=").append(response.getHits().getTotalHits().value); - listener.onResponse((T) sb.toString()); - }, e -> { listener.onFailure(e); }); + }); adClient.searchAnomalyResults(searchAnomalyResultsRequest, searchAnomalyResultsListener); } @@ -165,6 +166,23 @@ public String getType() { return TYPE; } + private void processHits(SearchHits searchHits, ActionListener listener) { + SearchHit[] hits = searchHits.getHits(); + + StringBuilder sb = new StringBuilder(); + sb.append("AnomalyResults=["); + for (SearchHit hit : hits) { + sb.append("{"); + sb.append("detectorId=").append(hit.getSourceAsMap().get("detector_id")).append(","); + sb.append("grade=").append(hit.getSourceAsMap().get("anomaly_grade")).append(","); + sb.append("confidence=").append(hit.getSourceAsMap().get("confidence")); + sb.append("}"); + } + sb.append("]"); + sb.append("TotalAnomalyResults=").append(searchHits.getTotalHits().value); + listener.onResponse((T) 
sb.toString()); + } + /** * Factory for the {@link SearchAnomalyResultsTool} */ diff --git a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java index 7e71504d..047075c2 100644 --- a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java +++ b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java @@ -20,4 +20,5 @@ public static enum DetectorStateString { // System indices constants are not cleanly exposed from the AD plugin, so we persist our // own constant here. public static final String AD_RESULTS_INDEX_PATTERN = ".opendistro-anomaly-results*"; + public static final String AD_DETECTORS_INDEX = ".opendistro-anomaly-detectors"; } diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index dfd0d0a1..993e2b08 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -5,9 +5,12 @@ package org.opensearch.integTest; +import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.http.Header; @@ -22,8 +25,12 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.ml.common.MLModel; import org.opensearch.ml.common.MLTask; import org.opensearch.ml.common.MLTaskState; @@ -146,6 +153,38 @@ 
protected void createIndexWithConfiguration(String indexName, String indexConfig assertEquals(indexName, responseInMap.get("index").toString()); } + // Similar to deleteExternalIndices, but including indices with "." prefix vs. excluding them + protected void deleteSystemIndices() throws IOException { + final Response response = client().performRequest(new Request("GET", "/_cat/indices?format=json" + "&expand_wildcards=all")); + try ( + final XContentParser parser = JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.getEntity().getContent() + ) + ) { + final XContentParser.Token token = parser.nextToken(); + final List> parserList; + if (token == XContentParser.Token.START_ARRAY) { + parserList = parser.listOrderedMap().stream().map(obj -> (Map) obj).collect(Collectors.toList()); + } else { + parserList = Collections.singletonList(parser.mapOrdered()); + } + + final List externalIndices = parserList + .stream() + .map(index -> (String) index.get("index")) + .filter(indexName -> indexName != null) + .filter(indexName -> indexName.startsWith(".")) + .collect(Collectors.toList()); + + for (final String indexName : externalIndices) { + adminClient().performRequest(new Request("DELETE", "/" + indexName)); + } + } + } + @SneakyThrows protected void addDocToIndex(String indexName, String docId, List fieldNames, List fieldContents) { XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java new file mode 100644 index 00000000..0faa7a21 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java @@ -0,0 +1,95 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.nio.file.Files; +import 
java.nio.file.Path; +import java.util.List; +import java.util.Locale; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.agent.tools.utils.ToolConstants; + +import lombok.SneakyThrows; + +public class SearchAnomalyDetectorsToolIT extends BaseAgentToolsIT { + private String registerAgentRequestBody; + private static final String detectorId = "foo-id"; + private static final String detectorName = "foo-name"; + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json") + .toURI() + ) + ); + createDetectorsSystemIndex(detectorId, detectorName); + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + deleteSystemIndices(); + } + + @SneakyThrows + public void testSearchAnomalyDetectorsToolInFlowAgent_withNoSystemIndex() { + deleteSystemIndices(); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); + } + + @SneakyThrows + public void testSearchAnomalyDetectorsToolInFlowAgent_noMatching() { + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "foo" + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); + } + + @SneakyThrows + public void testSearchAnomalyDetectorsToolInFlowAgent_matching() { + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals( + 
String.format(Locale.ROOT, "AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, 1), + result + ); + } + + @SneakyThrows + private void createDetectorsSystemIndex(String detectorId, String detectorName) { + createIndexWithConfiguration( + ToolConstants.AD_DETECTORS_INDEX, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": { \"keyword\": { \"type\": \"keyword\", \"ignore_above\": 256 }}" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(ToolConstants.AD_DETECTORS_INDEX, detectorId, List.of("name"), List.of(detectorName)); + } +} diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java new file mode 100644 index 00000000..6454af5e --- /dev/null +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java @@ -0,0 +1,109 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Locale; + +import org.junit.After; +import org.junit.Before; + +import lombok.SneakyThrows; + +public class SearchAnomalyResultsToolIT extends BaseAgentToolsIT { + private String registerAgentRequestBody; + private static final String detectorId = "foo-id"; + private static final double anomalyGrade = 0.5; + private static final double confidence = 0.6; + private static final String resultsSystemIndexName = ".opendistro-anomaly-results-1"; + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json") + .toURI() + ) + ); + createAnomalyResultsSystemIndex(detectorId, 
anomalyGrade, confidence); + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + deleteSystemIndices(); + } + + @SneakyThrows + public void testSearchAnomalyResultsToolInFlowAgent_withNoSystemIndex() { + deleteSystemIndices(); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorId\": \"" + detectorId + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("AnomalyResults=[]TotalAnomalyResults=0", result); + } + + @SneakyThrows + public void testSearchAnomalyResultsToolInFlowAgent_noMatching() { + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorId\": \"" + detectorId + "foo" + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("AnomalyResults=[]TotalAnomalyResults=0", result); + } + + @SneakyThrows + public void testSearchAnomalyResultsToolInFlowAgent_matching() { + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorId\": \"" + detectorId + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals( + String + .format( + Locale.ROOT, + "AnomalyResults=[{detectorId=%s,grade=%2.1f,confidence=%2.1f}]TotalAnomalyResults=%d", + detectorId, + anomalyGrade, + confidence, + 1 + ), + result + ); + } + + @SneakyThrows + private void createAnomalyResultsSystemIndex(String detectorId, double anomalyGrade, double confidence) { + createIndexWithConfiguration( + resultsSystemIndexName, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"detector_id\": {\"type\": \"keyword\"}," + + " \"anomaly_grade\": {\"type\": \"double\"}," + + " \"confidence\": {\"type\": \"double\"}," + + " \"data_start_time\": {\"type\": \"date\", \"format\": \"strict_date_time||epoch_millis\"}" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex( + resultsSystemIndexName, + "foo-id", + List.of("detector_id", 
"anomaly_grade", "confidence"), + List.of(detectorId, anomalyGrade, confidence) + ); + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json new file mode 100644 index 00000000..710a2518 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json @@ -0,0 +1,10 @@ +{ + "name": "Test_Search_Anomaly_Results_Agent", + "type": "flow", + "tools": [ + { + "type": "SearchAnomalyResultsTool", + "description": "Use this tool to search anomaly results." + } + ] +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json new file mode 100644 index 00000000..fc362709 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json @@ -0,0 +1,10 @@ +{ + "name": "Test_Search_Detectors_Agent", + "type": "flow", + "tools": [ + { + "type": "SearchAnomalyDetectorsTool", + "description": "Use this tool to search anomaly detectors." 
+ } + ] +} \ No newline at end of file From 04693a6fac8d1b099c3e799bd164da11b3106965 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 12 Jan 2024 16:23:12 +0800 Subject: [PATCH 030/119] fix compile error (#114) (#115) * fix compile error * fix compile error --------- (cherry picked from commit 7a596a0c0c70cf70d3eeccf525991db322bbeaa7) Signed-off-by: Hailong Cui Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../opensearch/agent/tools/NeuralSparseSearchTool.java | 10 ++++++++++ src/main/java/org/opensearch/agent/tools/PPLTool.java | 10 ++++++++++ src/main/java/org/opensearch/agent/tools/RAGTool.java | 10 ++++++++++ .../org/opensearch/agent/tools/SearchAlertsTool.java | 10 ++++++++++ .../agent/tools/SearchAnomalyDetectorsTool.java | 10 ++++++++++ .../agent/tools/SearchAnomalyResultsTool.java | 10 ++++++++++ .../org/opensearch/agent/tools/SearchIndexTool.java | 10 ++++++++++ .../org/opensearch/agent/tools/SearchMonitorsTool.java | 10 ++++++++++ .../java/org/opensearch/agent/tools/VectorDBTool.java | 10 ++++++++++ .../org/opensearch/agent/tools/VisualizationsTool.java | 10 ++++++++++ .../agent/tools/AbstractRetrieverToolTests.java | 10 ++++++++++ 11 files changed, 110 insertions(+) diff --git a/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java index 40c57aba..9e2ba1f7 100644 --- a/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java +++ b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java @@ -106,5 +106,15 @@ public NeuralSparseSearchTool create(Map params) { .docSize(docSize) .build(); } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java 
b/src/main/java/org/opensearch/agent/tools/PPLTool.java index d3471fdf..6a3e2fcf 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -213,6 +213,16 @@ public PPLTool create(Map map) { public String getDefaultDescription() { return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } private SearchRequest buildSearchRequest(String indexName) { diff --git a/src/main/java/org/opensearch/agent/tools/RAGTool.java b/src/main/java/org/opensearch/agent/tools/RAGTool.java index 7c9c26c5..a88930fd 100644 --- a/src/main/java/org/opensearch/agent/tools/RAGTool.java +++ b/src/main/java/org/opensearch/agent/tools/RAGTool.java @@ -275,5 +275,15 @@ public RAGTool create(Map params) { public String getDefaultDescription() { return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java index 3ade5b33..88797278 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java @@ -177,6 +177,16 @@ public SearchAlertsTool create(Map map) { public String getDefaultDescription() { return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index bfffa8b4..364b5ad3 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ 
b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -295,6 +295,16 @@ public SearchAnomalyDetectorsTool create(Map map) { public String getDefaultDescription() { return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index 06c5303d..ef6a3a04 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -227,6 +227,16 @@ public SearchAnomalyResultsTool create(Map map) { public String getDefaultDescription() { return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java index c7577501..f6d5a80a 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java @@ -175,5 +175,15 @@ public SearchIndexTool create(Map params) { public String getDefaultDescription() { return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java index 21975080..a737bab2 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java @@ -240,6 +240,16 @@ public SearchMonitorsTool create(Map map) { public String getDefaultDescription() { 
return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java index 428b9f14..aea3a01c 100644 --- a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java +++ b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java @@ -115,5 +115,15 @@ public VectorDBTool create(Map params) { .k(k) .build(); } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java index 958f1eda..35bb5c1c 100644 --- a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java +++ b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java @@ -164,5 +164,15 @@ public VisualizationsTool create(Map params) { public String getDefaultDescription() { return DEFAULT_DESCRIPTION; } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } } } diff --git a/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java index e55a2d8f..04b5f473 100644 --- a/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/AbstractRetrieverToolTests.java @@ -221,6 +221,16 @@ public void testFactory() { public AbstractRetrieverTool create(Map params) { return null; } + + @Override + public String getDefaultType() { + return null; + } + + @Override + public String getDefaultVersion() { + return null; + } }; factoryMock.init(client, TEST_XCONTENT_REGISTRY_FOR_QUERY); From 
bd72f5b273f148d44a4a74465587c0ce7336d4c6 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 12 Jan 2024 09:00:13 -0800 Subject: [PATCH 031/119] feature: tune description on monitor and anomaly detection (#120) (#126) (cherry picked from commit c8b68984b5ec576f3d214cf5f732c05a2809ab95) Signed-off-by: yuye-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java | 3 ++- .../org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java | 3 ++- .../org/opensearch/agent/tools/SearchAnomalyResultsTool.java | 3 ++- .../java/org/opensearch/agent/tools/SearchMonitorsTool.java | 3 ++- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java index 88797278..0997198f 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java @@ -32,7 +32,8 @@ @ToolAnnotation(SearchAlertsTool.TYPE) public class SearchAlertsTool implements Tool { public static final String TYPE = "SearchAlertsTool"; - private static final String DEFAULT_DESCRIPTION = "Use this tool to search alerts."; + private static final String DEFAULT_DESCRIPTION = + "This is a tool that finds alert trigger information. 
It takes 12 optional argument named sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is monitor_name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and searchString which defines the search string to use for searching a specific alert (default is an empty String), and severityLevel which defines the severity level to filter for (default is ALL), and alertState which defines the alert state to filter for (default is ALL), and monitorId which defines the monitor ID to filter for, and alertIndex which defines the alert index to search from (default is null), and monitorIds which defines the list of monitor IDs to filter for, and workflowIds which defines the list of workflow IDs to filter for(default is null), and alertIds which defines the list of alert IDs to filter for (default is null). The tool returns a list of alerts, and the total number of alerts."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index 364b5ad3..db042979 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -48,7 +48,8 @@ @ToolAnnotation(SearchAnomalyDetectorsTool.TYPE) public class SearchAnomalyDetectorsTool implements Tool { public static final String TYPE = "SearchAnomalyDetectorsTool"; - private static final String DEFAULT_DESCRIPTION = "Use this tool to search anomaly detectors."; + private static final String DEFAULT_DESCRIPTION = + "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the monitor (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index being detected (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (default is null), and lastUpdateTime which defines the latest update time of the anomaly detector (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). The tool returns the list of anomaly detectors, and the total number of anomaly detectors."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index ef6a3a04..86c62918 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -39,7 +39,8 @@ @ToolAnnotation(SearchAnomalyResultsTool.TYPE) public class SearchAnomalyResultsTool implements Tool { public static final String TYPE = "SearchAnomalyResultsTool"; - private static final String DEFAULT_DESCRIPTION = "Use this tool to search anomaly results."; + private static final String DEFAULT_DESCRIPTION = + "This is a tool that searches anomaly results. 
It takes 9 arguments named detectorId which defines the detector ID to filter for (default is null), and realtime which defines whether the anomaly is real time, and anomalyGradeThreshold which defines the threshold for anomaly grade (a number between 0 and 1 that indicates how anomalous a data point is) (default is 0), and dataStartTime which defines the start time of the anomaly query (default is null), and dataEndTime which defines the end time of the anomaly query (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is desc), and sortString which which defines how to sort the results (default is data_start_time), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). The tool returns a list of anomaly results, and the total number of anomaly result."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java index a737bab2..91f25182 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java @@ -44,7 +44,8 @@ @ToolAnnotation(SearchMonitorsTool.TYPE) public class SearchMonitorsTool implements Tool { public static final String TYPE = "SearchMonitorsTool"; - private static final String DEFAULT_DESCRIPTION = "Use this tool to search alerting monitors."; + private static final String DEFAULT_DESCRIPTION = + "This is a tool that searches alerting monitors. 
It takes 10 optional arguments named monitorId which defines the monitor ID to filter for (default is null), and monitorName which defines explicit name of the monitor (default is null), and monitorNamePattern which is a wildcard query to match detector name (default is null), and enabled which defines whether the monitor is enabled (default is null, indicating both), and hasTriggers which defines whether the monitor has triggers enabled (default is null, indicating both), and indices which defines the index being monitored (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). The tool returns a list of monitors, and the total number of monitors."; @Setter @Getter From 6860ae91a21a458e39369e8e270d1c7777fc1ad2 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 13:06:24 +0800 Subject: [PATCH 032/119] Add default prompt to ppl tool (#125) (#129) * add default prompt for ppl tool * fix Upper problem * change wrong information * remove uesless log * add corresponding UTs * apply spotless * use locale instead * move dict to static * move dict to static * replace throw error with error log * add default value for PPL model type * apply spotless --------- (cherry picked from commit 7e4c8d583d910d4d0478fe78c8ffe96549042dc0) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/PPLTool.java | 62 ++++++++++++++++++- .../agent/tools/PPLDefaultPrompt.json | 4 ++ .../opensearch/agent/tools/PPLToolTests.java | 34 +++++++--- 3 files changed, 89 insertions(+), 11 deletions(-) create mode 100644 
src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 6a3e2fcf..5cc2b23a 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -7,6 +7,9 @@ import static org.opensearch.ml.common.CommonValue.*; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; @@ -14,6 +17,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.StringJoiner; import java.util.regex.Matcher; @@ -83,12 +87,48 @@ public class PPLTool implements Tool { private String contextPrompt; + private PPLModelType pplModelType; + private static Gson gson = new Gson(); - public PPLTool(Client client, String modelId, String contextPrompt) { + private static Map defaultPromptDict; + + static { + try { + defaultPromptDict = loadDefaultPromptDict(); + } catch (IOException e) { + log.error("fail to load default prompt dict" + e.getMessage()); + defaultPromptDict = new HashMap<>(); + } + } + + public enum PPLModelType { + CLAUDE, + FINETUNE; + + public static PPLModelType from(String value) { + if (value.isEmpty()) { + return PPLModelType.CLAUDE; + } + try { + return PPLModelType.valueOf(value.toUpperCase(Locale.ROOT)); + } catch (Exception e) { + log.error("Wrong PPL Model type, should be CLAUDE or FINETUNE"); + return PPLModelType.CLAUDE; + } + } + + } + + public PPLTool(Client client, String modelId, String contextPrompt, String pplModelType) { this.client = client; this.modelId = modelId; - this.contextPrompt = contextPrompt; + this.pplModelType = PPLModelType.from(pplModelType); + if (contextPrompt.isEmpty()) { + 
this.contextPrompt = this.defaultPromptDict.getOrDefault(this.pplModelType.toString(), ""); + } else { + this.contextPrompt = contextPrompt; + } } @Override @@ -206,7 +246,12 @@ public void init(Client client) { @Override public PPLTool create(Map map) { - return new PPLTool(client, (String) map.get("model_id"), (String) map.get("prompt")); + return new PPLTool( + client, + (String) map.get("model_id"), + (String) map.getOrDefault("prompt", ""), + (String) map.getOrDefault("model_type", "") + ); } @Override @@ -223,6 +268,7 @@ public String getDefaultType() { public String getDefaultVersion() { return null; } + } private SearchRequest buildSearchRequest(String indexName) { @@ -371,4 +417,14 @@ private String parseOutput(String llmOutput, String indexName) { return ppl; } + private static Map loadDefaultPromptDict() throws IOException { + InputStream searchResponseIns = PPLTool.class.getResourceAsStream("PPLDefaultPrompt.json"); + if (searchResponseIns != null) { + String defaultPromptContent = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + Map defaultPromptDict = gson.fromJson(defaultPromptContent, Map.class); + return defaultPromptDict; + } + return new HashMap<>(); + } + } diff --git a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json new file mode 100644 index 00000000..dfcfc97f --- /dev/null +++ b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json @@ -0,0 +1,4 @@ +{ + "CLAUDE": "\n\nHuman:You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=\\`\\` | where \\`\\` = '\\`\\`'\n\nHere are some sample questions and the PPL query to retrieve the information. 
The format for fields is\n\\`\\`\\`\n- field_name: field_type (sample field value)\n\\`\\`\\`\n\nFor example, below is a field called \\`timestamp\\`, it has a field type of \\`date\\`, and a sample value of it could look like \\`1686000665919\\`.\n\\`\\`\\`\n- timestamp: date (1686000665919)\n\\`\\`\\`\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=\\`accounts\\` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort +age | head 5 | fields \\`firstname\\`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=\\`accounts\\` | fields \\`address\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie' OR \\`lastname\\` = 'frank' | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=\\`accounts\\` | where \\`firstname\\` != 'Hattie' AND \\`lastname\\` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=\\`accounts\\` | where QUERY_STRING(['email'], '.com') | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where there is 
an email\nPPL: source=\\`accounts\\` | where ISNOTNULL(\\`email\\`)\n\nQuestion: Count the number of documents in index 'accounts'\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=\\`accounts\\` | where \\`firstname\\` ='Amber' | stats COUNT() AS \\`count\\`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=\\`accounts\\` | where \\`age\\` > 33 | stats COUNT() AS \\`count\\`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=\\`accounts\\` | stats DISTINCT_COUNT(age) AS \\`distinct_count\\`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\` BY \\`gender\\`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=\\`accounts\\` | stats AVG(\\`age\\`) AS \\`avg_age\\`, MIN(\\`age\\`) AS \\`min_age\\`, MAX(\\`age\\`) AS \\`max_age\\`\n\nQuestion: Show all states sorted by average balance. 
index is 'accounts'\nPPL: source=\\`accounts\\` | stats AVG(\\`balance\\`) AS \\`avg_balance\\` BY \\`state\\` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- 
total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'clothing') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\` by SPAN(\\`order_date\\`, 2h) AS \\`span\\`, \\`geoip.city_name\\`\n\nQuestion: What is the total revenue of shoes each day in this week? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'shoes') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(\\`taxful_total_price\\`) AS \\`revenue\\` by SPAN(\\`order_date\\`, 1d) AS \\`span\\`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text 
('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND \`observerTime\` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 February? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '!200') AND \`observerTime\` >= '2023-02-01 00:00:00' AND \`observerTime\` < '2023-03-01 00:00:00' | stats COUNT() AS \`count\`\n\nQuestion: Count the number of business days that have web category logs last week? index is 'events'\nPPL: source=\`events\` | where \`event.category\` = 'web' AND \`observerTime\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(\`observerTime\`) >= 2 AND DAY_OF_WEEK(\`observerTime\`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(\`observerTime\`, 'yyyy-MM-dd')) AS \`distinct_count\`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=\`events\` | stats SUM(\`http.response.bytes\`) AS \`sum_bytes\` by \`trace_id\` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=\`events\` | patterns \`body\` | stats take(\`body\`, 1) AS \`sample_pattern\` by \`patterns_field\` | fields \`sample_pattern\`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns \`body\` | stats take(\`body\`, 1) AS \`sample_pattern\` by \`patterns_field\` | fields \`sample_pattern\`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. 
Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a \\`text\\` or \\`keyword\\` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. Similarly, if you need a date field, look for a relevant field name with type \\`date\\` and not \\`long\\`.\n#02 You must pick a field with \\`date\\` type when filtering on date/time.\n#03 You must pick a field with \\`date\\` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of \\`log\\`, \\`body\\`, \\`message\\`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where \\`timestamp\\` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where \\`timestamp\\` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where \\`timestamp\\` < '2023-01-01 00:00:00''. Do not use \\`DATE_FORMAT()\\`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(\\`\\`, )' must have type \\`date\\`, not \\`long\\`.\n#05 When aggregating by \\`SPAN\\` and another field, put \\`SPAN\\` after \\`by\\` and before the other field, eg. 
'stats COUNT() AS \\`count\\` by SPAN(\\`timestamp\\`, 1d) AS \\`span\\`, \\`category\\`'.\n#06 You must put values in quotes when filtering fields with \\`text\\` or \\`keyword\\` field type.\n#07 To find documents that contain certain phrases in string fields, use \\`QUERY_STRING\\` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. \\`integer\\`), then use 'where \\`status_code\\` >= 400'; if the field is a string (eg. \\`text\\` or \\`keyword\\`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPlease only contain PPL inside your response.\n----------------\nQuestion: ${indexInfo.question}? index is \\`${indexInfo.indexName}\\`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", + "FINETUNE": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. 
Now I have a question: ${indexInfo.question} Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.indexName}\n\n### Fields:\n${indexInfo.mappingInfo}\n\n### Response:\n" +} \ No newline at end of file diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index e3d725e1..680586d0 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -35,7 +35,6 @@ import org.opensearch.ml.common.output.model.ModelTensor; import org.opensearch.ml.common.output.model.ModelTensorOutput; import org.opensearch.ml.common.output.model.ModelTensors; -import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.ml.common.transport.MLTaskResponse; import org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; import org.opensearch.search.SearchHit; @@ -128,7 +127,20 @@ public void setup() { @Test public void testTool() { - Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertEquals("ppl result", returnResults.get("executionResult")); + assertEquals("source=demo| head 1", returnResults.get("ppl")); + }, e -> { log.info(e); })); + + } + + @Test + public void testTool_with_DefaultPrompt() { + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "model_type", "claude")); assertEquals(PPLTool.TYPE, tool.getName()); tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { @@ -141,7 +153,7 @@ public void 
testTool() { @Test public void testTool_withPPLTag() { - Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); assertEquals(PPLTool.TYPE, tool.getName()); pplReturns = Collections.singletonMap("response", "source=demo\n|\n\rhead 1"); @@ -158,7 +170,7 @@ public void testTool_withPPLTag() { @Test public void testTool_querySystemIndex() { - Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); assertEquals(PPLTool.TYPE, tool.getName()); Exception exception = assertThrows( IllegalArgumentException.class, @@ -173,9 +185,15 @@ public void testTool_querySystemIndex() { ); } + @Test + public void testTool_WrongModelType() { + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "model_type", "wrong_model_type")); + assertEquals(PPLTool.PPLModelType.CLAUDE, tool.getPplModelType()); + } + @Test public void testTool_getMappingFailure() { - Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); assertEquals(PPLTool.TYPE, tool.getName()); Exception exception = new Exception("get mapping error"); doAnswer(invocation -> { @@ -195,7 +213,7 @@ public void testTool_getMappingFailure() { @Test public void testTool_predictModelFailure() { - Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); assertEquals(PPLTool.TYPE, 
tool.getName()); Exception exception = new Exception("predict model error"); doAnswer(invocation -> { @@ -215,7 +233,7 @@ public void testTool_predictModelFailure() { @Test public void testTool_searchFailure() { - Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); assertEquals(PPLTool.TYPE, tool.getName()); Exception exception = new Exception("search error"); doAnswer(invocation -> { @@ -235,7 +253,7 @@ public void testTool_searchFailure() { @Test public void testTool_executePPLFailure() { - Tool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); assertEquals(PPLTool.TYPE, tool.getName()); Exception exception = new Exception("execute ppl error"); doAnswer(invocation -> { From 097d11c01fa3be977598be1891baff04a1e143bb Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 18 Jan 2024 12:31:26 +0800 Subject: [PATCH 033/119] fix prompt (#133) (#134) (cherry picked from commit e278d8d6a258dde3447c6d8b72ea88bc723f87f9) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../resources/org/opensearch/agent/tools/PPLDefaultPrompt.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json index dfcfc97f..9fda5865 100644 --- a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json +++ b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json @@ -1,4 +1,4 @@ { - "CLAUDE": "\n\nHuman:You will be given a question about 
some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=\\`\\` | where \\`\\` = '\\`\\`'\n\nHere are some sample questions and the PPL query to retrieve the information. The format for fields is\n\\`\\`\\`\n- field_name: field_type (sample field value)\n\\`\\`\\`\n\nFor example, below is a field called \\`timestamp\\`, it has a field type of \\`date\\`, and a sample value of it could look like \\`1686000665919\\`.\n\\`\\`\\`\n- timestamp: date (1686000665919)\n\\`\\`\\`\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=\\`accounts\\` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort +age | head 5 | fields \\`firstname\\`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=\\`accounts\\` | fields \\`address\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie' OR \\`lastname\\` = 'frank' | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=\\`accounts\\` | where 
\\`firstname\\` != 'Hattie' AND \\`lastname\\` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=\\`accounts\\` | where QUERY_STRING(['email'], '.com') | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where there is an email\nPPL: source=\\`accounts\\` | where ISNOTNULL(\\`email\\`)\n\nQuestion: Count the number of documents in index 'accounts'\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=\\`accounts\\` | where \\`firstname\\` ='Amber' | stats COUNT() AS \\`count\\`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=\\`accounts\\` | where \\`age\\` > 33 | stats COUNT() AS \\`count\\`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=\\`accounts\\` | stats DISTINCT_COUNT(age) AS \\`distinct_count\\`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\` BY \\`gender\\`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=\\`accounts\\` | stats AVG(\\`age\\`) AS \\`avg_age\\`, MIN(\\`age\\`) AS \\`min_age\\`, MAX(\\`age\\`) AS \\`max_age\\`\n\nQuestion: Show all states sorted by average balance. 
index is 'accounts'\nPPL: source=\\`accounts\\` | stats AVG(\\`balance\\`) AS \\`avg_balance\\` BY \\`state\\` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- 
total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'clothing') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\` by SPAN(\\`order_date\\`, 2h) AS \\`span\\`, \\`geoip.city_name\\`\n\nQuestion: What is the total revenue of shoes each day in this week? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'shoes') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(\\`taxful_total_price\\`) AS \\`revenue\\` by SPAN(\\`order_date\\`, 1d) AS \\`span\\`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text 
('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND \`observerTime\` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 February? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '!200') AND \`observerTime\` >= '2023-02-01 00:00:00' AND \`observerTime\` < '2023-03-01 00:00:00' | stats COUNT() AS \`count\`\n\nQuestion: Count the number of business days that have web category logs last week? index is 'events'\nPPL: source=\`events\` | where \`event.category\` = 'web' AND \`observerTime\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(\`observerTime\`) >= 2 AND DAY_OF_WEEK(\`observerTime\`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(\`observerTime\`, 'yyyy-MM-dd')) AS \`distinct_count\`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=\`events\` | stats SUM(\`http.response.bytes\`) AS \`sum_bytes\` by \`trace_id\` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=\`events\` | patterns \`body\` | stats take(\`body\`, 1) AS \`sample_pattern\` by \`patterns_field\` | fields \`sample_pattern\`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns \`body\` | stats take(\`body\`, 1) AS \`sample_pattern\` by \`patterns_field\` | fields \`sample_pattern\`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. 
Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a \\`text\\` or \\`keyword\\` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. Similarly, if you need a date field, look for a relevant field name with type \\`date\\` and not \\`long\\`.\n#02 You must pick a field with \\`date\\` type when filtering on date/time.\n#03 You must pick a field with \\`date\\` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of \\`log\\`, \\`body\\`, \\`message\\`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where \\`timestamp\\` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where \\`timestamp\\` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where \\`timestamp\\` < '2023-01-01 00:00:00''. Do not use \\`DATE_FORMAT()\\`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(\\`\\`, )' must have type \\`date\\`, not \\`long\\`.\n#05 When aggregating by \\`SPAN\\` and another field, put \\`SPAN\\` after \\`by\\` and before the other field, eg. 
'stats COUNT() AS \\`count\\` by SPAN(\\`timestamp\\`, 1d) AS \\`span\\`, \\`category\\`'.\n#06 You must put values in quotes when filtering fields with \\`text\\` or \\`keyword\\` field type.\n#07 To find documents that contain certain phrases in string fields, use \\`QUERY_STRING\\` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. \\`integer\\`), then use 'where \\`status_code\\` >= 400'; if the field is a string (eg. \\`text\\` or \\`keyword\\`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPlease only contain PPL inside your response.\n----------------\nQuestion: ${indexInfo.question}? index is \\`${indexInfo.indexName}\\`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", + "CLAUDE": "\n\nHuman:You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=\\`\\` | where \\`\\` = '\\`\\`'\n\nHere are some sample questions and the PPL query to retrieve the information. 
The format for fields is\n\\`\\`\\`\n- field_name: field_type (sample field value)\n\\`\\`\\`\n\nFor example, below is a field called \\`timestamp\\`, it has a field type of \\`date\\`, and a sample value of it could look like \\`1686000665919\\`.\n\\`\\`\\`\n- timestamp: date (1686000665919)\n\\`\\`\\`\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=\\`accounts\\` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort +age | head 5 | fields \\`firstname\\`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=\\`accounts\\` | fields \\`address\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie' OR \\`lastname\\` = 'frank' | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=\\`accounts\\` | where \\`firstname\\` != 'Hattie' AND \\`lastname\\` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=\\`accounts\\` | where QUERY_STRING(['email'], '.com') | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where there is 
an email\nPPL: source=\`accounts\` | where ISNOTNULL(\`email\`)\n\nQuestion: Count the number of documents in index 'accounts'\nPPL: source=\`accounts\` | stats COUNT() AS \`count\`\n\nQuestion: Count the number of people with firstname 'Amber' in index 'accounts'\nPPL: source=\`accounts\` | where \`firstname\` ='Amber' | stats COUNT() AS \`count\`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=\`accounts\` | where \`age\` > 33 | stats COUNT() AS \`count\`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=\`accounts\` | stats DISTINCT_COUNT(age) AS \`distinct_count\`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=\`accounts\` | stats COUNT() AS \`count\` BY \`gender\`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=\`accounts\` | stats AVG(\`age\`) AS \`avg_age\`, MIN(\`age\`) AS \`min_age\`, MAX(\`age\`) AS \`max_age\`\n\nQuestion: Show all states sorted by average balance. 
index is 'accounts'\nPPL: source=\\`accounts\\` | stats AVG(\\`balance\\`) AS \\`avg_balance\\` BY \\`state\\` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- 
total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'clothing') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\` by SPAN(\\`order_date\\`, 2h) AS \\`span\\`, \\`geoip.city_name\\`\n\nQuestion: What is the total revenue of shoes each day in this week? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'shoes') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(\\`taxful_total_price\\`) AS \\`revenue\\` by SPAN(\\`order_date\\`, 1d) AS \\`span\\`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text 
('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND \`observerTime\` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 February? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '!200') AND \`observerTime\` >= '2023-02-01 00:00:00' AND \`observerTime\` < '2023-03-01 00:00:00' | stats COUNT() AS \`count\`\n\nQuestion: Count the number of business days that have web category logs last week? index is 'events'\nPPL: source=\`events\` | where \`category\` = 'web' AND \`observerTime\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(\`observerTime\`) >= 2 AND DAY_OF_WEEK(\`observerTime\`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(\`observerTime\`, 'yyyy-MM-dd')) AS \`distinct_count\`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=\`events\` | stats SUM(\`http.response.bytes\`) AS \`sum_bytes\` by \`trace_id\` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=\`events\` | patterns \`body\` | stats take(\`body\`, 1) AS \`sample_pattern\` by \`patterns_field\` | fields \`sample_pattern\`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=\`events\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns \`body\` | stats take(\`body\`, 1) AS \`sample_pattern\` by \`patterns_field\` | fields \`sample_pattern\`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. 
Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a \`text\` or \`keyword\` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. Similarly, if you need a date field, look for a relevant field name with type \`date\` and not \`long\`.\n#02 You must pick a field with \`date\` type when filtering on date/time.\n#03 You must pick a field with \`date\` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of \`log\`, \`body\`, \`message\`.\n\nStep 3. Use the chosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where \`timestamp\` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where \`timestamp\` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where \`timestamp\` < '2023-01-01 00:00:00''. Do not use \`DATE_FORMAT()\`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(\`\`, )' must have type \`date\`, not \`long\`.\n#05 When aggregating by \`SPAN\` and another field, put \`SPAN\` after \`by\` and before the other field, eg. 
'stats COUNT() AS \`count\` by SPAN(\`timestamp\`, 1d) AS \`span\`, \`category\`'.\n#06 You must put values in quotes when filtering fields with \`text\` or \`keyword\` field type.\n#07 To find documents that contain certain phrases in string fields, use \`QUERY_STRING\` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numeric (eg. \`integer\`), then use 'where \`status_code\` >= 400'; if the field is a string (eg. \`text\` or \`keyword\`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPut your PPL query in <ppl> tags.\n----------------\nQuestion: ${indexInfo.question}? index is \`${indexInfo.indexName}\`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", "FINETUNE": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. 
Now I have a question: ${indexInfo.question} Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.indexName}\n\n### Fields:\n${indexInfo.mappingInfo}\n\n### Response:\n" } \ No newline at end of file From b24a6216504f5863cfad3a6bc4d35d4cdf471e97 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:03:41 -0800 Subject: [PATCH 034/119] Add alerting tools IT; fix missing system index bug of SearchMonitorsTool (#135) (#141) (cherry picked from commit 3e4d45106d10588c866899082b2493e17d26335e) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 3 +- .../agent/tools/SearchMonitorsTool.java | 83 ++++++++++++------- .../agent/tools/utils/ToolConstants.java | 6 +- .../integTest/SearchAlertsToolIT.java | 57 +++++++++++++ .../integTest/SearchMonitorsToolIT.java | 58 +++++++++++++ ...nt_of_search_alerts_tool_request_body.json | 10 +++ ..._of_search_monitors_tool_request_body.json | 10 +++ 7 files changed, 196 insertions(+), 31 deletions(-) create mode 100644 src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java create mode 100644 src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json diff --git a/build.gradle b/build.gradle index 45b3f314..9d857f9c 100644 --- a/build.gradle +++ b/build.gradle @@ -129,6 +129,7 @@ dependencies { zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-knn', version: "${version}" zipArchive group: 'org.opensearch.plugin', name:'neural-search', version: "${version}" + zipArchive group: 'org.opensearch.plugin', 
name:'alerting', version: "${version}" // Test dependencies testImplementation "org.opensearch.test:framework:${opensearch_version}" @@ -388,4 +389,4 @@ task updateVersion { // Include the required files that needs to be updated with new Version ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) } -} +} \ No newline at end of file diff --git a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java index 91f25182..8edcf22c 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java @@ -6,8 +6,11 @@ package org.opensearch.agent.tools; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.lucene.search.join.ScoreMode; @@ -20,6 +23,7 @@ import org.opensearch.commons.alerting.action.GetMonitorResponse; import org.opensearch.commons.alerting.action.SearchMonitorRequest; import org.opensearch.commons.alerting.model.Monitor; +import org.opensearch.commons.alerting.model.ScheduledJob; import org.opensearch.core.action.ActionListener; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.ExistsQueryBuilder; @@ -104,22 +108,17 @@ public void run(Map parameters, ActionListener listener) if (monitorId != null) { GetMonitorRequest getMonitorRequest = new GetMonitorRequest(monitorId, 1L, RestRequest.Method.GET, null); ActionListener getMonitorListener = ActionListener.wrap(response -> { - StringBuilder sb = new StringBuilder(); Monitor monitor = response.getMonitor(); - if (monitor != null) { - sb.append("Monitors=["); - sb.append("{"); - 
sb.append("id=").append(monitor.getId()).append(","); - sb.append("name=").append(monitor.getName()); - sb.append("}]"); - sb.append("TotalMonitors=1"); + processGetMonitorHit(monitor, listener); + }, e -> { + // System index isn't initialized by default, so ignore such errors. Alerting plugin does not return the + // standard IndexNotFoundException so we parse the message instead + if (e.getMessage().contains("Configured indices are not found")) { + processGetMonitorHit(null, listener); } else { - sb.append("Monitors=[]TotalMonitors=0"); + log.error("Failed to get monitor.", e); + listener.onFailure(e); } - listener.onResponse((T) sb.toString()); - }, e -> { - log.error("Failed to search monitors.", e); - listener.onFailure(e); }); AlertingPluginInterface.INSTANCE.getMonitor((NodeClient) client, getMonitorRequest, getMonitorListener); } else { @@ -167,24 +166,23 @@ public void run(Map parameters, ActionListener listener) .from(startIndex) .sort(sortString, sortOrder); - SearchMonitorRequest searchMonitorRequest = new SearchMonitorRequest(new SearchRequest().source(searchSourceBuilder)); + SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder).indices(ScheduledJob.SCHEDULED_JOBS_INDEX); + SearchMonitorRequest searchMonitorRequest = new SearchMonitorRequest(searchRequest); ActionListener searchMonitorListener = ActionListener.wrap(response -> { - StringBuilder sb = new StringBuilder(); - SearchHit[] hits = response.getHits().getHits(); - sb.append("Monitors=["); - for (SearchHit hit : hits) { - sb.append("{"); - sb.append("id=").append(hit.getId()).append(","); - sb.append("name=").append(hit.getSourceAsMap().get("name")); - sb.append("}"); - } - sb.append("]"); - sb.append("TotalMonitors=").append(response.getHits().getTotalHits().value); - listener.onResponse((T) sb.toString()); + List hits = Arrays.asList(response.getHits().getHits()); + Map hitsAsMap = hits.stream().collect(Collectors.toMap(SearchHit::getId, hit -> hit)); + 
processHits(hitsAsMap, listener); + }, e -> { - log.error("Failed to search monitors.", e); - listener.onFailure(e); + // System index isn't initialized by default, so ignore such errors. Alerting plugin does not return the + // standard IndexNotFoundException so we parse the message instead + if (e.getMessage().contains("Configured indices are not found")) { + processHits(Collections.emptyMap(), listener); + } else { + log.error("Failed to search monitors.", e); + listener.onFailure(e); + } }); AlertingPluginInterface.INSTANCE.searchMonitors((NodeClient) client, searchMonitorRequest, searchMonitorListener); } @@ -200,6 +198,35 @@ public String getType() { return TYPE; } + private void processHits(Map hitsAsMap, ActionListener listener) { + StringBuilder sb = new StringBuilder(); + sb.append("Monitors=["); + for (SearchHit hit : hitsAsMap.values()) { + sb.append("{"); + sb.append("id=").append(hit.getId()).append(","); + sb.append("name=").append(hit.getSourceAsMap().get("name")); + sb.append("}"); + } + sb.append("]"); + sb.append("TotalMonitors=").append(hitsAsMap.size()); + listener.onResponse((T) sb.toString()); + } + + private void processGetMonitorHit(Monitor monitor, ActionListener listener) { + StringBuilder sb = new StringBuilder(); + if (monitor != null) { + sb.append("Monitors=["); + sb.append("{"); + sb.append("id=").append(monitor.getId()).append(","); + sb.append("name=").append(monitor.getName()); + sb.append("}]"); + sb.append("TotalMonitors=1"); + } else { + sb.append("Monitors=[]TotalMonitors=0"); + } + listener.onResponse((T) sb.toString()); + } + /** * Factory for the {@link SearchMonitorsTool} */ diff --git a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java index 047075c2..e6d95afe 100644 --- a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java +++ b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java @@ -17,8 +17,10 @@ public 
static enum DetectorStateString { Initializing } - // System indices constants are not cleanly exposed from the AD plugin, so we persist our - // own constant here. + // System indices constants are not cleanly exposed from the AD & Alerting plugins, so we persist our + // own constants here. public static final String AD_RESULTS_INDEX_PATTERN = ".opendistro-anomaly-results*"; public static final String AD_DETECTORS_INDEX = ".opendistro-anomaly-detectors"; + + public static final String ALERTING_CONFIG_INDEX = ".opendistro-alerting-config"; } diff --git a/src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java new file mode 100644 index 00000000..66e8f233 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import java.nio.file.Files; +import java.nio.file.Path; + +import org.junit.After; +import org.junit.Before; + +import lombok.SneakyThrows; + +public class SearchAlertsToolIT extends BaseAgentToolsIT { + private String registerAgentRequestBody; + private static final String monitorId = "foo-id"; + private static final String monitorName = "foo-name"; + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json") + .toURI() + ) + ); + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + deleteSystemIndices(); + } + + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_withNoSystemIndex() { + deleteSystemIndices(); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "\"}}"; 
+ String result = executeAgent(agentId, agentInput); + assertEquals("Alerts=[]TotalAlerts=0", result); + } + + // TODO: Add IT to test against sample alerts data + // https://github.com/opensearch-project/skills/issues/136 +} diff --git a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java new file mode 100644 index 00000000..c94bf8e8 --- /dev/null +++ b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.nio.file.Files; +import java.nio.file.Path; + +import org.junit.After; +import org.junit.Before; + +import lombok.SneakyThrows; + +public class SearchMonitorsToolIT extends BaseAgentToolsIT { + private String registerAgentRequestBody; + private static final String monitorId = "foo-id"; + private static final String monitorName = "foo-name"; + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json") + .toURI() + ) + ); + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + deleteSystemIndices(); + } + + @SneakyThrows + public void testSearchMonitorsToolInFlowAgent_withNoSystemIndex() { + deleteSystemIndices(); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorName\": \"" + monitorName + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("Monitors=[]TotalMonitors=0", result); + } + + // TODO: Add IT to test against sample monitor data +} diff --git 
a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json new file mode 100644 index 00000000..80d1146b --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json @@ -0,0 +1,10 @@ +{ + "name": "Test_Search_Alerts_Agent", + "type": "flow", + "tools": [ + { + "type": "SearchAlertsTool", + "description": "Use this tool to search alerts." + } + ] +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json new file mode 100644 index 00000000..d38d955f --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json @@ -0,0 +1,10 @@ +{ + "name": "Test_Search_Monitors_Agent", + "type": "flow", + "tools": [ + { + "type": "SearchMonitorsTool", + "description": "Use this tool to search alerting monitors." 
+ } + ] +} \ No newline at end of file From 952cdd72b828b690e4c46a23c9bce3f9b2d43f59 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 23 Jan 2024 10:05:21 +0800 Subject: [PATCH 035/119] fix name bug (#139) (#142) * fix name bug * apply spotless * change error message --------- (cherry picked from commit 8074492891c67a72c1584d716d738d50979622a3) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 5cc2b23a..cec1114d 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -148,9 +148,12 @@ public void run(Map parameters, ActionListener listener) GetMappingsRequest getMappingsRequest = buildGetMappingRequest(indexName); client.admin().indices().getMappings(getMappingsRequest, ActionListener.wrap(getMappingsResponse -> { Map mappings = getMappingsResponse.getMappings(); + if (mappings.size() == 0) { + throw new IllegalArgumentException("No matching mapping with index name: " + indexName); + } client.search(searchRequest, ActionListener.wrap(searchResponse -> { SearchHit[] searchHits = searchResponse.getHits().getHits(); - String tableInfo = constructTableInfo(searchHits, mappings, indexName); + String tableInfo = constructTableInfo(searchHits, mappings); String prompt = constructPrompt(tableInfo, question, indexName); RemoteInferenceInputDataSet inputDataSet = RemoteInferenceInputDataSet .builder() @@ -286,9 +289,9 @@ private GetMappingsRequest buildGetMappingRequest(String indexName) { return getMappingsRequest; } - private String constructTableInfo(SearchHit[] searchHits, Map mappings, String indexName) - throws 
PrivilegedActionException { - MappingMetadata mappingMetadata = mappings.get(indexName); + private String constructTableInfo(SearchHit[] searchHits, Map mappings) throws PrivilegedActionException { + String firstIndexName = (String) mappings.keySet().toArray()[0]; + MappingMetadata mappingMetadata = mappings.get(firstIndexName); Map mappingSource = (Map) mappingMetadata.getSourceAsMap().get("properties"); Map fieldsToType = new HashMap<>(); extractNamesTypes(mappingSource, fieldsToType, ""); From 9f28f4c0a72d74b1763375821fd0368cdd53f90b Mon Sep 17 00:00:00 2001 From: Tyler Ohlsen Date: Wed, 24 Jan 2024 13:06:47 -0800 Subject: [PATCH 036/119] [Backport 2.x] Include more return values for SearchAnomalyDetectorsTool and SearchMonitorsTool (#145) * Include more return values for SearchAnomalyDetectorsTool and SearchMonitorsTool (#143) Signed-off-by: Tyler Ohlsen * 2.x changes Signed-off-by: Tyler Ohlsen --------- Signed-off-by: Tyler Ohlsen --- .../agent/tools/SearchAlertsTool.java | 2 +- .../tools/SearchAnomalyDetectorsTool.java | 8 +- .../agent/tools/SearchAnomalyResultsTool.java | 2 +- .../agent/tools/SearchMonitorsTool.java | 15 ++- .../SearchAnomalyDetectorsToolTests.java | 107 ++++++++++++++---- .../agent/tools/SearchMonitorsToolTests.java | 27 ++++- .../SearchAnomalyDetectorsToolIT.java | 8 +- 7 files changed, 131 insertions(+), 38 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java index 0997198f..b27d0887 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java @@ -33,7 +33,7 @@ public class SearchAlertsTool implements Tool { public static final String TYPE = "SearchAlertsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that finds alert trigger information. 
It takes 12 optional argument named sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is monitor_name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and searchString which defines the search string to use for searching a specific alert (default is an empty String), and severityLevel which defines the severity level to filter for (default is ALL), and alertState which defines the alert state to filter for (default is ALL), and monitorId which defines the monitor ID to filter for, and alertIndex which defines the alert index to search from (default is null), and monitorIds which defines the list of monitor IDs to filter for, and workflowIds which defines the list of workflow IDs to filter for(default is null), and alertIds which defines the list of alert IDs to filter for (default is null). The tool returns a list of alerts, and the total number of alerts."; + "This is a tool that finds alert trigger information. 
It takes 12 optional arguments named sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is monitor_name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and searchString which defines the search string to use for searching a specific alert (default is an empty String), and severityLevel which defines the severity level to filter for (default is ALL), and alertState which defines the alert state to filter for (default is ALL), and monitorId which defines the monitor ID to filter for, and alertIndex which defines the alert index to search from (default is null), and monitorIds which defines the list of monitor IDs to filter for, and workflowIds which defines the list of workflow IDs to filter for (default is null), and alertIds which defines the list of alert IDs to filter for (default is null). 
The tool returns 2 values: a list of alerts (each containing id, version, schema version, monitor ID, workflow ID, workflow name, monitor name, monitor version, monitor user, trigger ID, trigger name, finding IDs, related doc IDs, state, start time, end time, last notification time, acknowledged time, error message, error history, severity, action execution results, aggregation result bucket, execution ID, associated alert IDs), and the total number of alerts."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index db042979..2bbbcfa7 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -49,7 +49,7 @@ public class SearchAnomalyDetectorsTool implements Tool { public static final String TYPE = "SearchAnomalyDetectorsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the monitor (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index being detected (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (default is null), and lastUpdateTime which defines the latest update time of the anomaly detector (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). The tool returns the list of anomaly detectors, and the total number of anomaly detectors."; + "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the monitor (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index being detected (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (default is null), and lastUpdateTime which defines the latest update time of the anomaly detector (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). 
The tool returns 2 values: a list of anomaly detectors (each containing the id, name, type, description, index, last update time), and the total number of anomaly detectors."; @Setter @Getter @@ -244,7 +244,11 @@ private void processHits(Map hitsAsMap, ActionListener for (SearchHit hit : hitsAsMap.values()) { sb.append("{"); sb.append("id=").append(hit.getId()).append(","); - sb.append("name=").append(hit.getSourceAsMap().get("name")); + sb.append("name=").append(hit.getSourceAsMap().get("name")).append(","); + sb.append("type=").append(hit.getSourceAsMap().get("type")).append(","); + sb.append("description=").append(hit.getSourceAsMap().get("description")).append(","); + sb.append("index=").append(hit.getSourceAsMap().get("indices")).append(","); + sb.append("lastUpdateTime=").append(hit.getSourceAsMap().get("last_update_time")); sb.append("}"); } sb.append("]"); diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index 86c62918..d9d63545 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -40,7 +40,7 @@ public class SearchAnomalyResultsTool implements Tool { public static final String TYPE = "SearchAnomalyResultsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches anomaly results. 
It takes 9 arguments named detectorId which defines the detector ID to filter for (default is null), and realtime which defines whether the anomaly is real time, and anomalyGradeThreshold which defines the threshold for anomaly grade (a number between 0 and 1 that indicates how anomalous a data point is) (default is 0), and dataStartTime which defines the start time of the anomaly query (default is null), and dataEndTime which defines the end time of the anomaly query (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is desc), and sortString which which defines how to sort the results (default is data_start_time), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). The tool returns a list of anomaly results, and the total number of anomaly result."; + "This is a tool that searches anomaly results. It takes 9 arguments named detectorId which defines the detector ID to filter for (default is null), and realtime which defines whether the anomaly is real time, and anomalyGradeThreshold which defines the threshold for anomaly grade (a number between 0 and 1 that indicates how anomalous a data point is) (default is 0), and dataStartTime which defines the start time of the anomaly query (default is null), and dataEndTime which defines the end time of the anomaly query (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is desc), and sortString which which defines how to sort the results (default is data_start_time), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). 
The tool returns 2 values: a list of anomaly results (where each result contains the detector ID, the anomaly grade, and the confidence), and the total number of anomaly results."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java index 8edcf22c..bbb9aaa7 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java @@ -49,8 +49,7 @@ public class SearchMonitorsTool implements Tool { public static final String TYPE = "SearchMonitorsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches alerting monitors. It takes 10 optional arguments named monitorId which defines the monitor ID to filter for (default is null), and monitorName which defines explicit name of the monitor (default is null), and monitorNamePattern which is a wildcard query to match detector name (default is null), and enabled which defines whether the monitor is enabled (default is null, indicating both), and hasTriggers which defines whether the monitor has triggers enabled (default is null, indicating both), and indices which defines the index being monitored (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). The tool returns a list of monitors, and the total number of monitors."; - + "This is a tool that searches alerting monitors. 
It takes 10 optional arguments named monitorId which defines the monitor ID to filter for (default is null), and monitorName which defines explicit name of the monitor (default is null), and monitorNamePattern which is a wildcard query to match detector name (default is null), and enabled which defines whether the monitor is enabled (default is null, indicating both), and hasTriggers which defines whether the monitor has triggers enabled (default is null, indicating both), and indices which defines the index being monitored (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). The tool returns 2 values: a list of alerting monitors (each containining ID, name, type, enabled, enabled time, last update time), and the total number of monitors."; @Setter @Getter private String name = TYPE; @@ -204,7 +203,11 @@ private void processHits(Map hitsAsMap, ActionListener for (SearchHit hit : hitsAsMap.values()) { sb.append("{"); sb.append("id=").append(hit.getId()).append(","); - sb.append("name=").append(hit.getSourceAsMap().get("name")); + sb.append("name=").append(hit.getSourceAsMap().get("name")).append(","); + sb.append("type=").append(hit.getSourceAsMap().get("type")).append(","); + sb.append("enabled=").append(hit.getSourceAsMap().get("enabled")).append(","); + sb.append("enabledTime=").append(hit.getSourceAsMap().get("enabled_time")).append(","); + sb.append("lastUpdateTime=").append(hit.getSourceAsMap().get("last_update_time")); sb.append("}"); } sb.append("]"); @@ -218,7 +221,11 @@ private void processGetMonitorHit(Monitor monitor, ActionListener listene sb.append("Monitors=["); sb.append("{"); sb.append("id=").append(monitor.getId()).append(","); - 
sb.append("name=").append(monitor.getName()); + sb.append("name=").append(monitor.getName()).append(","); + sb.append("type=").append(monitor.getType()).append(","); + sb.append("enabled=").append(monitor.getEnabled()).append(","); + sb.append("enabledTime=").append(monitor.getEnabledTime().toEpochMilli()).append(","); + sb.append("lastUpdateTime=").append(monitor.getLastUpdateTime().toEpochMilli()); sb.append("}]"); sb.append("TotalMonitors=1"); } else { diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index 3c5b4295..c4f0c293 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -14,6 +14,10 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Locale; @@ -27,6 +31,8 @@ import org.mockito.MockitoAnnotations; import org.opensearch.action.ActionType; import org.opensearch.action.search.SearchResponse; +import org.opensearch.ad.model.AnomalyDetector; +import org.opensearch.ad.model.IntervalTimeConfiguration; import org.opensearch.ad.transport.GetAnomalyDetectorAction; import org.opensearch.ad.transport.GetAnomalyDetectorResponse; import org.opensearch.ad.transport.SearchAnomalyDetectorAction; @@ -48,6 +54,8 @@ public class SearchAnomalyDetectorsToolTests { private Map emptyParams; private Map nonEmptyParams; + private AnomalyDetector testDetector; + @Before public void setup() { MockitoAnnotations.openMocks(this); @@ -56,6 +64,26 @@ public void setup() { nullParams = null; emptyParams = Collections.emptyMap(); nonEmptyParams = Map.of("detectorName", "foo"); + + testDetector = new AnomalyDetector( + 
"foo-id", + 1L, + "foo-name", + "foo-description", + "foo-time-field", + new ArrayList(Arrays.asList("foo-index")), + Collections.emptyList(), + null, + new IntervalTimeConfiguration(5, ChronoUnit.MINUTES), + null, + 1, + Collections.emptyMap(), + 1, + Instant.now(), + Collections.emptyList(), + null, + null + ); } @Test @@ -87,13 +115,16 @@ public void testRunWithSingleAnomalyDetector() throws Exception { XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); content.startObject(); - content.field("name", detectorName); + content.field("name", testDetector.getName()); + content.field("type", testDetector.getDetectorType()); + content.field("description", testDetector.getDescription()); + content.field("indices", testDetector.getIndices().get(0)); + content.field("last_update_time", testDetector.getLastUpdateTime().toEpochMilli()); content.endObject(); SearchHit[] hits = new SearchHit[1]; - hits[0] = new SearchHit(0, detectorId, null, null).sourceRef(BytesReference.bytes(content)); + hits[0] = new SearchHit(0, testDetector.getDetectorId(), null, null).sourceRef(BytesReference.bytes(content)); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); - String expectedResponseStr = String - .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); + String expectedResponseStr = getExpectedResponseString(testDetector); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); @@ -122,8 +153,6 @@ public void testRunWithRunningDetectorTrue() throws Exception { SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); - String expectedResponseStr = String - .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", 
detectorId, detectorName, hits.length); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); @@ -131,7 +160,10 @@ public void testRunWithRunningDetectorTrue() throws Exception { tool.run(Map.of("running", "true"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertEquals(expectedResponseStr, responseCaptor.getValue()); + String response = responseCaptor.getValue(); + assertTrue(response.contains(String.format("id=%s", detectorId))); + assertTrue(response.contains(String.format("name=%s", detectorName))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -169,8 +201,6 @@ public void testRunWithRunningDetectorUndefined() throws Exception { SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); - String expectedResponseStr = String - .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); @@ -178,7 +208,10 @@ public void testRunWithRunningDetectorUndefined() throws Exception { tool.run(Map.of("foo", "bar"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertEquals(expectedResponseStr, responseCaptor.getValue()); + String response = responseCaptor.getValue(); + assertTrue(response.contains(String.format("id=%s", detectorId))); + assertTrue(response.contains(String.format("name=%s", 
detectorName))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -196,8 +229,6 @@ public void testRunWithNullRealtimeTask() throws Exception { // Overriding the mocked response to realtime task and setting to null. This occurs when // a detector is created but is never started. when(getDetectorProfileResponse.getRealtimeAdTask()).thenReturn(null); - String expectedResponseStr = String - .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); @@ -205,7 +236,10 @@ public void testRunWithNullRealtimeTask() throws Exception { tool.run(Map.of("disabled", "true"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertEquals(expectedResponseStr, responseCaptor.getValue()); + String response = responseCaptor.getValue(); + assertTrue(response.contains(String.format("id=%s", detectorId))); + assertTrue(response.contains(String.format("name=%s", detectorName))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -222,8 +256,6 @@ public void testRunWithTaskStateCreated() throws Exception { .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); // Overriding the mocked response to set realtime task state to CREATED when(getDetectorProfileResponse.getRealtimeAdTask().getState()).thenReturn("CREATED"); - String expectedResponseStr = String - .format("AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, hits.length); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, 
getDetectorProfileResponse); @@ -231,7 +263,10 @@ public void testRunWithTaskStateCreated() throws Exception { tool.run(Map.of("running", "false"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertEquals(expectedResponseStr, responseCaptor.getValue()); + String response = responseCaptor.getValue(); + assertTrue(response.contains(String.format("id=%s", detectorId))); + assertTrue(response.contains(String.format("name=%s", detectorName))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -262,7 +297,14 @@ public void testRunWithTaskStateVariousFailed() throws Exception { tool.run(Map.of("failed", "true"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=3")); + String response = responseCaptor.getValue(); + assertTrue(response.contains(String.format("id=%s", detectorId1))); + assertTrue(response.contains(String.format("name=%s", detectorName1))); + assertTrue(response.contains(String.format("id=%s", detectorId2))); + assertTrue(response.contains(String.format("name=%s", detectorName2))); + assertTrue(response.contains(String.format("id=%s", detectorId3))); + assertTrue(response.contains(String.format("name=%s", detectorName3))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -293,7 +335,14 @@ public void testRunWithCombinedDetectorStatesTrue() throws Exception { tool.run(Map.of("running", "true", "disabled", "true", "failed", "true"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=3")); + String response = 
responseCaptor.getValue(); + assertTrue(response.contains(String.format("id=%s", detectorId1))); + assertTrue(response.contains(String.format("name=%s", detectorName1))); + assertTrue(response.contains(String.format("id=%s", detectorId2))); + assertTrue(response.contains(String.format("name=%s", detectorName2))); + assertTrue(response.contains(String.format("id=%s", detectorId3))); + assertTrue(response.contains(String.format("name=%s", detectorName3))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -355,7 +404,12 @@ public void testRunWithCombinedDetectorStatesMixed() throws Exception { tool.run(Map.of("running", "true", "disabled", "false", "failed", "true"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=2")); + String response = responseCaptor.getValue(); + assertTrue(response.contains(String.format("id=%s", detectorId1))); + assertTrue(response.contains(String.format("name=%s", detectorName1))); + assertTrue(response.contains(String.format("id=%s", detectorId3))); + assertTrue(response.contains(String.format("name=%s", detectorName3))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", 2))); } @Test @@ -403,4 +457,19 @@ private void mockProfileApiCalls(SearchResponse getDetectorsResponse, GetAnomaly return null; }).when(nodeClient).execute(any(GetAnomalyDetectorAction.class), any(), any()); } + + private String getExpectedResponseString(AnomalyDetector testDetector) { + return String + .format( + "AnomalyDetectors=[{id=%s,name=%s,type=%s,description=%s,index=%s,lastUpdateTime=%d}]TotalAnomalyDetectors=%d", + testDetector.getDetectorId(), + testDetector.getName(), + testDetector.getDetectorType(), + testDetector.getDescription(), + testDetector.getIndices().get(0), + testDetector.getLastUpdateTime().toEpochMilli(), + 
1 + ); + + } } diff --git a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java index 37bc960f..72338fec 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java @@ -140,9 +140,7 @@ public void testRunWithMonitorId() throws Exception { testMonitor, Collections.emptyList() ); - String expectedResponseStr = String - .format("Monitors=[{id=%s,name=%s}]TotalMonitors=%d", testMonitor.getId(), testMonitor.getName(), 1); - + String expectedResponseStr = getExpectedResponseString(testMonitor); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); @@ -186,8 +184,11 @@ public void testRunWithSingleMonitor() throws Exception { XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); content.startObject(); - content.field("type", "monitor"); content.field("name", testMonitor.getName()); + content.field("type", testMonitor.getType()); + content.field("enabled", Boolean.toString(testMonitor.getEnabled())); + content.field("enabled_time", Long.toString(testMonitor.getEnabledTime().toEpochMilli())); + content.field("last_update_time", Long.toString(testMonitor.getLastUpdateTime().toEpochMilli())); content.endObject(); SearchHit[] hits = new SearchHit[1]; hits[0] = new SearchHit(0, testMonitor.getId(), null, null).sourceRef(BytesReference.bytes(content)); @@ -204,8 +205,7 @@ public void testRunWithSingleMonitor() throws Exception { null, null ); - String expectedResponseStr = String - .format("Monitors=[{id=%s,name=%s}]TotalMonitors=%d", testMonitor.getId(), testMonitor.getName(), hits.length); + String expectedResponseStr = getExpectedResponseString(testMonitor); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); @@ -253,4 +253,19 @@ public void testValidate() { 
assertTrue(tool.validate(monitorIdParams)); assertTrue(tool.validate(nullParams)); } + + private String getExpectedResponseString(Monitor testMonitor) { + return String + .format( + "Monitors=[{id=%s,name=%s,type=%s,enabled=%s,enabledTime=%d,lastUpdateTime=%d}]TotalMonitors=%d", + testMonitor.getId(), + testMonitor.getName(), + testMonitor.getType(), + testMonitor.getEnabled(), + testMonitor.getEnabledTime().toEpochMilli(), + testMonitor.getLastUpdateTime().toEpochMilli(), + 1 + ); + + } } diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java index 0faa7a21..aa406f8d 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java @@ -8,7 +8,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.List; -import java.util.Locale; import org.junit.After; import org.junit.Before; @@ -69,10 +68,9 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_matching() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; String result = executeAgent(agentId, agentInput); - assertEquals( - String.format(Locale.ROOT, "AnomalyDetectors=[{id=%s,name=%s}]TotalAnomalyDetectors=%d", detectorId, detectorName, 1), - result - ); + assertTrue(result.contains(String.format("id=%s", detectorId))); + assertTrue(result.contains(String.format("name=%s", detectorName))); + assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); } @SneakyThrows From 18e31ae3780210214790d1d97d6d8387c99c3d77 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 25 Jan 2024 13:53:51 -0800 Subject: [PATCH 037/119] Change default anomaly grade to nonzero (#148) (#149) (cherry picked from commit 
a87b9573a2fb85f7cf03ca0242aa23061d3789ac) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/SearchAnomalyResultsTool.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index d9d63545..701c707c 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -85,7 +85,7 @@ public void run(Map parameters, ActionListener listener) final Boolean realTime = parameters.containsKey("realTime") ? Boolean.parseBoolean(parameters.get("realTime")) : null; final Double anomalyGradeThreshold = parameters.containsKey("anomalyGradeThreshold") ? Double.parseDouble(parameters.get("anomalyGradeThreshold")) - : null; + : 0; final Long dataStartTime = parameters.containsKey("dataStartTime") && StringUtils.isNumeric(parameters.get("dataStartTime")) ? 
Long.parseLong(parameters.get("dataStartTime")) : null; @@ -115,7 +115,7 @@ public void run(Map parameters, ActionListener listener) mustList.add(boolQuery); } if (anomalyGradeThreshold != null) { - mustList.add(new RangeQueryBuilder("anomaly_grade").gte(anomalyGradeThreshold)); + mustList.add(new RangeQueryBuilder("anomaly_grade").gt(anomalyGradeThreshold)); } if (dataStartTime != null || dataEndTime != null) { RangeQueryBuilder rangeQuery = new RangeQueryBuilder("anomaly_grade"); From bafff969486258ab64c56a6843530218c4a889cc Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 26 Jan 2024 09:01:26 +0800 Subject: [PATCH 038/119] add execute field (#146) (#147) * add execute field * apply spotless --------- (cherry picked from commit 16a26ce06ffefc44010ccbe67f32ba09c8bb4e8c) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../java/org/opensearch/agent/tools/PPLTool.java | 12 ++++++++++-- .../org/opensearch/agent/tools/PPLToolTests.java | 13 +++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index cec1114d..c008d523 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -87,6 +87,8 @@ public class PPLTool implements Tool { private String contextPrompt; + private Boolean execute; + private PPLModelType pplModelType; private static Gson gson = new Gson(); @@ -120,7 +122,7 @@ public static PPLModelType from(String value) { } - public PPLTool(Client client, String modelId, String contextPrompt, String pplModelType) { + public PPLTool(Client client, String modelId, String contextPrompt, String pplModelType, boolean execute) { this.client = client; this.modelId = modelId; this.pplModelType = PPLModelType.from(pplModelType); @@ -129,6 
+131,7 @@ public PPLTool(Client client, String modelId, String contextPrompt, String pplMo } else { this.contextPrompt = contextPrompt; } + this.execute = execute; } @Override @@ -169,6 +172,10 @@ public void run(Map parameters, ActionListener listener) ModelTensor modelTensor = modelTensors.getMlModelTensors().get(0); Map dataAsMap = (Map) modelTensor.getDataAsMap(); String ppl = parseOutput(dataAsMap.get("response"), indexName); + if (!this.execute) { + listener.onResponse((T) ppl); + return; + } JSONObject jsonContent = new JSONObject(ImmutableMap.of("query", ppl)); PPLQueryRequest pplQueryRequest = new PPLQueryRequest(ppl, jsonContent, null, "jdbc"); TransportPPLQueryRequest transportPPLQueryRequest = new TransportPPLQueryRequest(pplQueryRequest); @@ -253,7 +260,8 @@ public PPLTool create(Map map) { client, (String) map.get("model_id"), (String) map.getOrDefault("prompt", ""), - (String) map.getOrDefault("model_type", "") + (String) map.getOrDefault("model_type", ""), + (boolean) map.getOrDefault("execute", true) ); } diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index 680586d0..129c2411 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -138,6 +138,19 @@ public void testTool() { } + @Test + public void testTool_with_WithoutExecution() { + PPLTool tool = PPLTool.Factory + .getInstance() + .create(ImmutableMap.of("model_id", "modelId", "model_type", "claude", "execute", false)); + assertEquals(PPLTool.TYPE, tool.getName()); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + assertEquals("source=demo| head 1", executePPLResult); + }, e -> { log.info(e); })); + + } + @Test public void testTool_with_DefaultPrompt() { PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "model_type", "claude")); 
From 1194d061c12789e111c17d54616a603c098ffdbb Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 26 Jan 2024 14:42:13 -0600 Subject: [PATCH 039/119] add description for VectorDBTool and remove json parsig for RAGTool (#121) (#124) (cherry picked from commit c81746e78d534272ee7760db28ed5bb487cfd6b8) Signed-off-by: Mingshi Liu Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/RAGTool.java | 4 +- .../opensearch/agent/tools/VectorDBTool.java | 3 ++ .../opensearch/agent/tools/RAGToolTests.java | 41 ++++++++++++++----- 3 files changed, 34 insertions(+), 14 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/RAGTool.java b/src/main/java/org/opensearch/agent/tools/RAGTool.java index a88930fd..e3670bd0 100644 --- a/src/main/java/org/opensearch/agent/tools/RAGTool.java +++ b/src/main/java/org/opensearch/agent/tools/RAGTool.java @@ -90,7 +90,6 @@ public RAGTool( this.docSize = docSize == null ? DEFAULT_DOC_SIZE : docSize; this.k = k == null ? 
DEFAULT_K : k; this.inferenceModelId = inferenceModelId; - outputParser = new Parser() { @Override public Object parse(Object o) { @@ -115,8 +114,7 @@ public void run(Map parameters, ActionListener listener) } try { - String question = parameters.get(INPUT_FIELD); - input = gson.fromJson(question, String.class); + input = parameters.get(INPUT_FIELD); } catch (Exception e) { log.error("Failed to read question from " + INPUT_FIELD, e); listener.onFailure(new IllegalArgumentException("Failed to read question from " + INPUT_FIELD)); diff --git a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java index aea3a01c..dfbbed26 100644 --- a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java +++ b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java @@ -28,6 +28,9 @@ @ToolAnnotation(VectorDBTool.TYPE) public class VectorDBTool extends AbstractRetrieverTool { public static final String TYPE = "VectorDBTool"; + + public static String DEFAULT_DESCRIPTION = + "Use this tool to performs knn-based dense retrieval. It takes 1 argument named input which is a string query for dense retrieval. 
The tool returns the dense retrieval results for the query."; public static final String MODEL_ID_FIELD = "model_id"; public static final String EMBEDDING_FIELD = "embedding_field"; public static final String K_FIELD = "k"; diff --git a/src/test/java/org/opensearch/agent/tools/RAGToolTests.java b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java index 79bfcebf..8ef43468 100644 --- a/src/test/java/org/opensearch/agent/tools/RAGToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java @@ -203,6 +203,36 @@ public void testRunWithEmptySearchResponse() throws IOException { verify(client).execute(any(), any(), any()); } + @Test + public void testRunWithQuestionJson() throws IOException { + NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + ragTool.setXContentRegistry(mockNamedXContentRegistry); + + ModelTensorOutput mlModelTensorOutput = getMlModelTensorOutput(); + SearchResponse mockedEmptySearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedEmptySearchResponseString) + ); + + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedEmptySearchResponse); + return null; + }).when(client).search(any(), any()); + + doAnswer(invocation -> { + ActionListener actionListener = invocation.getArgument(2); + actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build()); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + ragTool.run(Map.of(INPUT_FIELD, "{question:'what is the population in seattle?'}"), listener); + verify(client).search(any(), any()); + verify(client).execute(any(), any(), any()); + } + @Test @SneakyThrows public void 
testRunWithRuntimeExceptionDuringSearch() { @@ -261,17 +291,6 @@ public void testRunWithEmptyInput() { ragTool.run(Map.of(INPUT_FIELD, ""), listener); } - @Test - public void testRunWithMalformedInput() throws IOException { - ActionListener listener = mock(ActionListener.class); - ragTool.run(Map.of(INPUT_FIELD, "{hello?"), listener); - verify(listener).onFailure(any(RuntimeException.class)); - ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Exception.class); - verify(listener).onFailure(argumentCaptor.capture()); - assertEquals("Failed to read question from " + INPUT_FIELD, argumentCaptor.getValue().getMessage()); - - } - @Test public void testFactory() { RAGTool.Factory factoryMock = new RAGTool.Factory(); From 361e0a109897828cdfca18868e69bff5b43845ca Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 26 Jan 2024 12:53:55 -0800 Subject: [PATCH 040/119] Enhance RagTool to choose neural sparse query type (#140) (#152) * enhance RagTool to choose neural sparse query type * Work around JDK 21.0.2 bug impacting scaling executors * Modify RAGTool factory create to initiate subtools * Add enableContentGeneration to RAGTool * Map embedding_model_id to model_id for NeuralSparseTool * Map embedding_model_id to model_id for NeuralSparseTool --------- (cherry picked from commit d7945d55dc28f95126891de8d8ca4ac2ea26e280) Signed-off-by: Mingshi Liu Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .github/workflows/ci.yml | 4 +- .../org/opensearch/agent/tools/RAGTool.java | 148 +++++----- .../opensearch/agent/tools/VectorDBTool.java | 5 + .../opensearch/agent/tools/RAGToolTests.java | 270 ++++++++++++++---- .../agent/tools/VectorDBToolTests.java | 2 +- .../neural_sparse_tool_search_response.json | 71 +++++ 6 files changed, 357 insertions(+), 143 deletions(-) create mode 100644 src/test/resources/org/opensearch/agent/tools/neural_sparse_tool_search_response.json 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3183a959..87f9899c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,7 +38,7 @@ jobs: java: - 11 - 17 - - 21 + - 21.0.1 name: Build and Test skills plugin on Linux runs-on: ubuntu-latest container: @@ -98,7 +98,7 @@ jobs: java: - 11 - 17 - - 21 + - 21.0.1 name: Build and Test skills plugin on Windows needs: Get-CI-Image-Tag runs-on: windows-latest diff --git a/src/main/java/org/opensearch/agent/tools/RAGTool.java b/src/main/java/org/opensearch/agent/tools/RAGTool.java index e3670bd0..6c341b05 100644 --- a/src/main/java/org/opensearch/agent/tools/RAGTool.java +++ b/src/main/java/org/opensearch/agent/tools/RAGTool.java @@ -6,7 +6,7 @@ package org.opensearch.agent.tools; import static org.apache.commons.lang3.StringEscapeUtils.escapeJson; -import static org.opensearch.agent.tools.VectorDBTool.DEFAULT_K; +import static org.opensearch.agent.tools.AbstractRetrieverTool.*; import static org.opensearch.ml.common.utils.StringUtils.gson; import static org.opensearch.ml.common.utils.StringUtils.toJson; @@ -21,10 +21,10 @@ import org.opensearch.ml.common.FunctionName; import org.opensearch.ml.common.dataset.remote.RemoteInferenceInputDataSet; import org.opensearch.ml.common.input.MLInput; -import org.opensearch.ml.common.output.model.ModelTensor; import org.opensearch.ml.common.output.model.ModelTensorOutput; import org.opensearch.ml.common.output.model.ModelTensors; import org.opensearch.ml.common.spi.tools.Parser; +import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.ml.common.spi.tools.ToolAnnotation; import org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; import org.opensearch.ml.common.transport.prediction.MLPredictionTaskRequest; @@ -44,25 +44,28 @@ @Setter @Getter @ToolAnnotation(RAGTool.TYPE) -public class RAGTool extends AbstractRetrieverTool { +public class RAGTool implements Tool { public static final String TYPE = "RAGTool"; public 
static String DEFAULT_DESCRIPTION = "Use this tool to retrieve helpful information to optimize the output of the large language model to answer questions."; public static final String INFERENCE_MODEL_ID_FIELD = "inference_model_id"; public static final String EMBEDDING_MODEL_ID_FIELD = "embedding_model_id"; + public static final String INDEX_FIELD = "index"; + public static final String SOURCE_FIELD = "source_field"; + public static final String DOC_SIZE_FIELD = "doc_size"; public static final String EMBEDDING_FIELD = "embedding_field"; public static final String OUTPUT_FIELD = "output_field"; + public static final String QUERY_TYPE = "query_type"; + public static final String CONTENT_GENERATION_FIELD = "enable_Content_Generation"; + public static final String K_FIELD = "k"; + private final AbstractRetrieverTool queryTool; private String name = TYPE; private String description = DEFAULT_DESCRIPTION; private Client client; private String inferenceModelId; + private Boolean enableContentGeneration; private NamedXContentRegistry xContentRegistry; - private String index; - private String embeddingField; - private String[] sourceFields; - private String embeddingModelId; - private Integer docSize; - private Integer k; + private String queryType; @Setter private Parser inputParser; @Setter @@ -72,24 +75,15 @@ public class RAGTool extends AbstractRetrieverTool { public RAGTool( Client client, NamedXContentRegistry xContentRegistry, - String index, - String embeddingField, - String[] sourceFields, - Integer k, - Integer docSize, - String embeddingModelId, - String inferenceModelId + String inferenceModelId, + Boolean enableContentGeneration, + AbstractRetrieverTool queryTool ) { - super(client, xContentRegistry, index, sourceFields, docSize); this.client = client; this.xContentRegistry = xContentRegistry; - this.index = index; - this.embeddingField = embeddingField; - this.sourceFields = sourceFields; - this.embeddingModelId = embeddingModelId; - this.docSize = docSize == 
null ? DEFAULT_DOC_SIZE : docSize; - this.k = k == null ? DEFAULT_K : k; this.inferenceModelId = inferenceModelId; + this.enableContentGeneration = enableContentGeneration; + this.queryTool = queryTool; outputParser = new Parser() { @Override public Object parse(Object o) { @@ -99,13 +93,6 @@ public Object parse(Object o) { }; } - // getQueryBody is not used in RAGTool - @Override - protected String getQueryBody(String queryText) { - return queryText; - } - - @Override public void run(Map parameters, ActionListener listener) { String input = null; @@ -121,22 +108,14 @@ public void run(Map parameters, ActionListener listener) return; } - Map params = new HashMap<>(); - VectorDBTool.Factory.getInstance().init(client, xContentRegistry); - params.put(VectorDBTool.INDEX_FIELD, this.index); - params.put(VectorDBTool.EMBEDDING_FIELD, this.embeddingField); - params.put(VectorDBTool.SOURCE_FIELD, gson.toJson(this.sourceFields)); - params.put(VectorDBTool.MODEL_ID_FIELD, this.embeddingModelId); - params.put(VectorDBTool.DOC_SIZE_FIELD, String.valueOf(this.docSize)); - params.put(VectorDBTool.K_FIELD, String.valueOf(this.k)); - VectorDBTool vectorDBTool = VectorDBTool.Factory.getInstance().create(params); - String embeddingInput = input; ActionListener actionListener = ActionListener.wrap(r -> { - T vectorDBToolOutput; - + T queryToolOutput; + if (!this.enableContentGeneration) { + listener.onResponse(r); + } if (r.equals("Can not get any match from search result.")) { - vectorDBToolOutput = (T) ""; + queryToolOutput = (T) ""; } else { Gson gson = new Gson(); String[] hits = r.toString().split("\n"); @@ -151,31 +130,21 @@ public void run(Map parameters, ActionListener listener) resultBuilder.append("_source: ").append(source.toString()).append("\n"); } - vectorDBToolOutput = (T) gson.toJson(resultBuilder.toString()); + queryToolOutput = (T) gson.toJson(resultBuilder.toString()); } Map tmpParameters = new HashMap<>(); tmpParameters.putAll(parameters); - if (vectorDBToolOutput 
instanceof List - && !((List) vectorDBToolOutput).isEmpty() - && ((List) vectorDBToolOutput).get(0) instanceof ModelTensors) { - ModelTensors tensors = (ModelTensors) ((List) vectorDBToolOutput).get(0); - Object response = tensors.getMlModelTensors().get(0).getDataAsMap().get("response"); - tmpParameters.put(OUTPUT_FIELD, response + ""); - } else if (vectorDBToolOutput instanceof ModelTensor) { - tmpParameters.put(OUTPUT_FIELD, escapeJson(toJson(((ModelTensor) vectorDBToolOutput).getDataAsMap()))); + if (queryToolOutput instanceof String) { + tmpParameters.put(OUTPUT_FIELD, (String) queryToolOutput); } else { - if (vectorDBToolOutput instanceof String) { - tmpParameters.put(OUTPUT_FIELD, (String) vectorDBToolOutput); - } else { - tmpParameters.put(OUTPUT_FIELD, escapeJson(toJson(vectorDBToolOutput.toString()))); - } + tmpParameters.put(OUTPUT_FIELD, escapeJson(toJson(queryToolOutput.toString()))); } RemoteInferenceInputDataSet inputDataSet = RemoteInferenceInputDataSet.builder().parameters(tmpParameters).build(); MLInput mlInput = MLInput.builder().algorithm(FunctionName.REMOTE).inputDataset(inputDataSet).build(); - ActionRequest request = new MLPredictionTaskRequest(inferenceModelId, mlInput, null); + ActionRequest request = new MLPredictionTaskRequest(this.inferenceModelId, mlInput, null); client.execute(MLPredictionTaskAction.INSTANCE, request, ActionListener.wrap(resp -> { ModelTensorOutput modelTensorOutput = (ModelTensorOutput) resp.getOutput(); @@ -186,33 +155,33 @@ public void run(Map parameters, ActionListener listener) listener.onResponse((T) outputParser.parse(modelTensorOutput.getMlModelOutputs())); } }, e -> { - log.error("Failed to run model " + inferenceModelId, e); + log.error("Failed to run model " + this.inferenceModelId, e); listener.onFailure(e); })); }, e -> { log.error("Failed to search index.", e); listener.onFailure(e); }); - vectorDBTool.run(Map.of(VectorDBTool.INPUT_FIELD, embeddingInput), actionListener); - + 
this.queryTool.run(Map.of(INPUT_FIELD, embeddingInput), actionListener); } - @Override public String getType() { return TYPE; } @Override + public String getVersion() { + return null; + } + public String getName() { return this.name; } - @Override public void setName(String s) { this.name = s; } - @Override public boolean validate(Map parameters) { if (parameters == null || parameters.size() == 0) { return false; @@ -224,7 +193,7 @@ public boolean validate(Map parameters) { /** * Factory class to create RAGTool */ - public static class Factory extends AbstractRetrieverTool.Factory { + public static class Factory implements Tool.Factory { private Client client; private NamedXContentRegistry xContentRegistry; @@ -250,23 +219,40 @@ public void init(Client client, NamedXContentRegistry xContentRegistry) { @Override public RAGTool create(Map params) { + String queryType = params.containsKey(QUERY_TYPE) ? (String) params.get(QUERY_TYPE) : "neural"; String embeddingModelId = (String) params.get(EMBEDDING_MODEL_ID_FIELD); - String index = (String) params.get(INDEX_FIELD); - String embeddingField = (String) params.get(EMBEDDING_FIELD); - String[] sourceFields = gson.fromJson((String) params.get(SOURCE_FIELD), String[].class); - String inferenceModelId = (String) params.get(INFERENCE_MODEL_ID_FIELD); - Integer docSize = params.containsKey(DOC_SIZE_FIELD) ? Integer.parseInt((String) params.get(DOC_SIZE_FIELD)) : 2; - return RAGTool - .builder() - .client(client) - .xContentRegistry(xContentRegistry) - .index(index) - .embeddingField(embeddingField) - .sourceFields(sourceFields) - .embeddingModelId(embeddingModelId) - .docSize(docSize) - .inferenceModelId(inferenceModelId) - .build(); + Boolean enableContentGeneration = params.containsKey(CONTENT_GENERATION_FIELD) + ? Boolean.parseBoolean((String) params.get(CONTENT_GENERATION_FIELD)) + : true; + String inferenceModelId = enableContentGeneration ? 
(String) params.get(INFERENCE_MODEL_ID_FIELD) : ""; + switch (queryType) { + case "neural_sparse": + params.put(NeuralSparseSearchTool.MODEL_ID_FIELD, embeddingModelId); + NeuralSparseSearchTool neuralSparseSearchTool = NeuralSparseSearchTool.Factory.getInstance().create(params); + return RAGTool + .builder() + .client(client) + .xContentRegistry(xContentRegistry) + .inferenceModelId(inferenceModelId) + .enableContentGeneration(enableContentGeneration) + .queryTool(neuralSparseSearchTool) + .build(); + case "neural": + params.put(VectorDBTool.MODEL_ID_FIELD, embeddingModelId); + VectorDBTool vectorDBTool = VectorDBTool.Factory.getInstance().create(params); + return RAGTool + .builder() + .client(client) + .xContentRegistry(xContentRegistry) + .inferenceModelId(inferenceModelId) + .enableContentGeneration(enableContentGeneration) + .queryTool(vectorDBTool) + .build(); + default: + log.error("Failed to read queryType, please input neural_sparse or neural."); + throw new IllegalArgumentException("Failed to read queryType, please input neural_sparse or neural."); + } + } @Override diff --git a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java index dfbbed26..dd83cb46 100644 --- a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java +++ b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java @@ -128,5 +128,10 @@ public String getDefaultType() { public String getDefaultVersion() { return null; } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } } } diff --git a/src/test/java/org/opensearch/agent/tools/RAGToolTests.java b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java index 8ef43468..4696c12c 100644 --- a/src/test/java/org/opensearch/agent/tools/RAGToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java @@ -53,20 +53,14 @@ public class RAGToolTests { public static final String TEST_EMBEDDING_FIELD = "test_embedding"; 
public static final String TEST_EMBEDDING_MODEL_ID = "1234"; public static final String TEST_INFERENCE_MODEL_ID = "1234"; + public static final String TEST_NEURAL_QUERY_TYPE = "neural"; + public static final String TEST_NEURAL_SPARSE_QUERY_TYPE = "neural_sparse"; - public static final String TEST_NEURAL_QUERY = "{\"query\":{\"neural\":{\"" - + TEST_EMBEDDING_FIELD - + "\":{\"query_text\":\"" - + TEST_QUERY_TEXT - + "\",\"model_id\":\"" - + TEST_EMBEDDING_MODEL_ID - + "\",\"k\":" - + DEFAULT_K - + "}}}" - + " }";; + static public final NamedXContentRegistry TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY = getQueryNamedXContentRegistry(); private RAGTool ragTool; private String mockedSearchResponseString; private String mockedEmptySearchResponseString; + private String mockedNeuralSparseSearchResponseString; @Mock private Parser mockOutputParser; @Mock @@ -89,10 +83,16 @@ public void setup() { } } + try (InputStream searchResponseIns = AbstractRetrieverTool.class.getResourceAsStream("neural_sparse_tool_search_response.json")) { + if (searchResponseIns != null) { + mockedNeuralSparseSearchResponseString = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + } + } client = mock(Client.class); listener = mock(ActionListener.class); - RAGTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_QUERY); - + RAGTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + VectorDBTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + NeuralSparseSearchTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); params = new HashMap<>(); params.put(RAGTool.INDEX_FIELD, TEST_INDEX); params.put(RAGTool.EMBEDDING_FIELD, TEST_EMBEDDING_FIELD); @@ -100,7 +100,9 @@ public void setup() { params.put(RAGTool.EMBEDDING_MODEL_ID_FIELD, TEST_EMBEDDING_MODEL_ID); params.put(RAGTool.INFERENCE_MODEL_ID_FIELD, TEST_INFERENCE_MODEL_ID); params.put(RAGTool.DOC_SIZE_FIELD, 
AbstractRetrieverToolTests.TEST_DOC_SIZE.toString()); - params.put(VectorDBTool.K_FIELD, DEFAULT_K); + params.put(RAGTool.K_FIELD, DEFAULT_K.toString()); + params.put(RAGTool.QUERY_TYPE, TEST_NEURAL_QUERY_TYPE); + params.put(RAGTool.CONTENT_GENERATION_FIELD, "true"); ragTool = RAGTool.Factory.getInstance().create(params); } @@ -118,12 +120,6 @@ public void testValidate() { public void testGetAttributes() { assertEquals(ragTool.getVersion(), null); assertEquals(ragTool.getType(), RAGTool.TYPE); - assertEquals(ragTool.getIndex(), TEST_INDEX); - assertEquals(ragTool.getDocSize(), TEST_DOC_SIZE); - assertEquals(ragTool.getSourceFields(), TEST_SOURCE_FIELDS); - assertEquals(ragTool.getEmbeddingField(), TEST_EMBEDDING_FIELD); - assertEquals(ragTool.getEmbeddingModelId(), TEST_EMBEDDING_MODEL_ID); - assertEquals(ragTool.getK(), DEFAULT_K); assertEquals(ragTool.getInferenceModelId(), TEST_INFERENCE_MODEL_ID); } @@ -134,15 +130,10 @@ public void testSetName() { assertEquals(ragTool.getName(), "test-tool"); } - @Test - public void testGetQueryBodySuccess() { - assertEquals(ragTool.getQueryBody(TEST_QUERY_TEXT), TEST_QUERY_TEXT); - } - @Test public void testOutputParser() throws IOException { - NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); ragTool.setXContentRegistry(mockNamedXContentRegistry); ModelTensorOutput mlModelTensorOutput = getMlModelTensorOutput(); @@ -167,7 +158,7 @@ public void testOutputParser() throws IOException { }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); ragTool.setOutputParser(mockOutputParser); - ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + ragTool.run(Map.of(INPUT_FIELD, TEST_QUERY_TEXT), listener); verify(client).search(any(), any()); verify(client).execute(any(), any(), any()); @@ -175,7 +166,7 @@ public void testOutputParser() throws IOException { @Test public void 
testRunWithEmptySearchResponse() throws IOException { - NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); ragTool.setXContentRegistry(mockNamedXContentRegistry); ModelTensorOutput mlModelTensorOutput = getMlModelTensorOutput(); @@ -198,14 +189,68 @@ public void testRunWithEmptySearchResponse() throws IOException { actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build()); return null; }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); - ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + ragTool.run(Map.of(INPUT_FIELD, TEST_QUERY_TEXT), listener); + verify(client).search(any(), any()); + verify(client).execute(any(), any(), any()); + } + + @Test + public void testRunWithNeuralSparseQueryType() throws IOException { + + Map paramsWithNeuralSparse = new HashMap<>(params); + paramsWithNeuralSparse.put(RAGTool.QUERY_TYPE, TEST_NEURAL_SPARSE_QUERY_TYPE); + + RAGTool rAGtoolWithNeuralSparseQuery = RAGTool.Factory.getInstance().create(paramsWithNeuralSparse); + + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); + rAGtoolWithNeuralSparseQuery.setXContentRegistry(mockNamedXContentRegistry); + + ModelTensorOutput mlModelTensorOutput = getMlModelTensorOutput(); + SearchResponse mockedNeuralSparseSearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + mockedNeuralSparseSearchResponseString + ) + ); + + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedNeuralSparseSearchResponse); + return null; + }).when(client).search(any(), any()); + + doAnswer(invocation -> { + 
ActionListener actionListener = invocation.getArgument(2); + actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build()); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + rAGtoolWithNeuralSparseQuery.run(Map.of(INPUT_FIELD, TEST_QUERY_TEXT), listener); verify(client).search(any(), any()); verify(client).execute(any(), any(), any()); } + @Test + public void testRunWithInvalidQueryType() throws IOException { + + RAGTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + Map paramsWithInvalidQueryType = new HashMap<>(params); + paramsWithInvalidQueryType.put(RAGTool.QUERY_TYPE, "sparse"); + try { + RAGTool rAGtoolWithInvalidQueryType = RAGTool.Factory.getInstance().create(paramsWithInvalidQueryType); + } catch (IllegalArgumentException e) { + assertEquals("Failed to read queryType, please input neural_sparse or neural.", e.getMessage()); + } + + } + @Test public void testRunWithQuestionJson() throws IOException { - NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); ragTool.setXContentRegistry(mockNamedXContentRegistry); ModelTensorOutput mlModelTensorOutput = getMlModelTensorOutput(); @@ -233,10 +278,84 @@ public void testRunWithQuestionJson() throws IOException { verify(client).execute(any(), any(), any()); } + @Test + public void testRunEmptyResponseWithNotEnableContentGeneration() throws IOException { + ActionListener mockListener = mock(ActionListener.class); + Map paramsWithNotEnableContentGeneration = new HashMap<>(params); + paramsWithNotEnableContentGeneration.put(RAGTool.CONTENT_GENERATION_FIELD, "false"); + + RAGTool rAGtoolWithNotEnableContentGeneration = RAGTool.Factory.getInstance().create(paramsWithNotEnableContentGeneration); + + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); + 
rAGtoolWithNotEnableContentGeneration.setXContentRegistry(mockNamedXContentRegistry); + + SearchResponse mockedEmptySearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedEmptySearchResponseString) + ); + + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedEmptySearchResponse); + return null; + }).when(client).search(any(), any()); + rAGtoolWithNotEnableContentGeneration.run(Map.of(INPUT_FIELD, "{question:'what is the population in seattle?'}"), mockListener); + + verify(client).search(any(), any()); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(mockListener).onResponse(responseCaptor.capture()); + assertEquals("Can not get any match from search result.", responseCaptor.getValue()); + + } + + @Test + public void testRunResponseWithNotEnableContentGeneration() throws IOException { + ActionListener mockListener = mock(ActionListener.class); + Map paramsWithNotEnableContentGeneration = new HashMap<>(params); + paramsWithNotEnableContentGeneration.put(RAGTool.CONTENT_GENERATION_FIELD, "false"); + + RAGTool rAGtoolWithNotEnableContentGeneration = RAGTool.Factory.getInstance().create(paramsWithNotEnableContentGeneration); + + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); + rAGtoolWithNotEnableContentGeneration.setXContentRegistry(mockNamedXContentRegistry); + + SearchResponse mockedNeuralSparseSearchResponse = SearchResponse + .fromXContent( + JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + mockedNeuralSparseSearchResponseString + ) + ); + + doAnswer(invocation -> { + SearchRequest searchRequest = invocation.getArgument(0); + 
assertEquals((long) TEST_DOC_SIZE, (long) searchRequest.source().size()); + ActionListener listener = invocation.getArgument(1); + listener.onResponse(mockedNeuralSparseSearchResponse); + return null; + }).when(client).search(any(), any()); + rAGtoolWithNotEnableContentGeneration.run(Map.of(INPUT_FIELD, "{question:'what is the population in seattle?'}"), mockListener); + + verify(client).search(any(), any()); + ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); + verify(mockListener).onResponse(responseCaptor.capture()); + assertEquals( + "{\"_index\":\"my-nlp-index\",\"_source\":{\"passage_text\":\"Hello world\",\"passage_embedding\":{\"!\":0.8708904,\"door\":0.8587369,\"hi\":2.3929274,\"worlds\":2.7839446,\"yes\":0.75845814,\"##world\":2.5432441,\"born\":0.2682308,\"nothing\":0.8625516,\"goodbye\":0.17146169,\"greeting\":0.96817183,\"birth\":1.2788506,\"come\":0.1623208,\"global\":0.4371151,\"it\":0.42951578,\"life\":1.5750692,\"thanks\":0.26481047,\"world\":4.7300377,\"tiny\":0.5462298,\"earth\":2.6555297,\"universe\":2.0308156,\"worldwide\":1.3903781,\"hello\":6.696973,\"so\":0.20279501,\"?\":0.67785245},\"id\":\"s1\"},\"_id\":\"1\",\"_score\":30.0029}\n" + + "{\"_index\":\"my-nlp-index\",\"_source\":{\"passage_text\":\"Hi planet\",\"passage_embedding\":{\"hi\":4.338913,\"planets\":2.7755864,\"planet\":5.0969057,\"mars\":1.7405145,\"earth\":2.6087382,\"hello\":3.3210192},\"id\":\"s2\"},\"_id\":\"2\",\"_score\":16.480486}\n", + responseCaptor.getValue() + ); + + } + @Test @SneakyThrows public void testRunWithRuntimeExceptionDuringSearch() { - NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); ragTool.setXContentRegistry(mockNamedXContentRegistry); doAnswer(invocation -> { SearchRequest searchRequest = invocation.getArgument(0); @@ -245,7 +364,7 @@ public void testRunWithRuntimeExceptionDuringSearch() { 
actionListener.onFailure(new RuntimeException("Failed to search index")); return null; }).when(client).search(any(), any()); - ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + ragTool.run(Map.of(INPUT_FIELD, TEST_QUERY_TEXT), listener); verify(listener).onFailure(any(RuntimeException.class)); ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Exception.class); verify(listener).onFailure(argumentCaptor.capture()); @@ -255,7 +374,7 @@ public void testRunWithRuntimeExceptionDuringSearch() { @Test @SneakyThrows public void testRunWithRuntimeExceptionDuringExecute() { - NamedXContentRegistry mockNamedXContentRegistry = getNeuralQueryNamedXContentRegistry(); + NamedXContentRegistry mockNamedXContentRegistry = getQueryNamedXContentRegistry(); ragTool.setXContentRegistry(mockNamedXContentRegistry); SearchResponse mockedSearchResponse = SearchResponse @@ -278,7 +397,7 @@ public void testRunWithRuntimeExceptionDuringExecute() { return null; }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); - ragTool.run(Map.of(INPUT_FIELD, "hello?"), listener); + ragTool.run(Map.of(INPUT_FIELD, TEST_QUERY_TEXT), listener); verify(listener).onFailure(any(RuntimeException.class)); ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Exception.class); verify(listener).onFailure(argumentCaptor.capture()); @@ -292,50 +411,83 @@ public void testRunWithEmptyInput() { } @Test - public void testFactory() { + public void testFactoryNeuralQuery() { RAGTool.Factory factoryMock = new RAGTool.Factory(); - RAGTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_QUERY); - factoryMock.init(client, TEST_XCONTENT_REGISTRY_FOR_QUERY); + RAGTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + factoryMock.init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); String defaultDescription = factoryMock.getDefaultDescription(); assertEquals(RAGTool.DEFAULT_DESCRIPTION, defaultDescription); + assertEquals(factoryMock.getDefaultType(), 
RAGTool.TYPE); + assertEquals(factoryMock.getDefaultVersion(), null); assertNotNull(RAGTool.Factory.getInstance()); + RAGTool rAGtool1 = factoryMock.create(params); + VectorDBTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + params.put(VectorDBTool.MODEL_ID_FIELD, TEST_EMBEDDING_MODEL_ID); + VectorDBTool queryTool = VectorDBTool.Factory.getInstance().create(params); + RAGTool rAGtool2 = new RAGTool(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY, TEST_INFERENCE_MODEL_ID, true, queryTool); - RAGTool rAGtool2 = new RAGTool( - client, - TEST_XCONTENT_REGISTRY_FOR_QUERY, - TEST_INDEX, - TEST_EMBEDDING_FIELD, - TEST_SOURCE_FIELDS, - DEFAULT_K, - TEST_DOC_SIZE, - TEST_EMBEDDING_MODEL_ID, - TEST_INFERENCE_MODEL_ID - ); + assertEquals(rAGtool1.getClient(), rAGtool2.getClient()); + assertEquals(rAGtool1.getInferenceModelId(), rAGtool2.getInferenceModelId()); + assertEquals(rAGtool1.getName(), rAGtool2.getName()); + assertEquals(rAGtool1.getQueryTool().getDocSize(), rAGtool2.getQueryTool().getDocSize()); + assertEquals(rAGtool1.getQueryTool().getIndex(), rAGtool2.getQueryTool().getIndex()); + assertEquals(rAGtool1.getQueryTool().getSourceFields(), rAGtool2.getQueryTool().getSourceFields()); + assertEquals(rAGtool1.getXContentRegistry(), rAGtool2.getXContentRegistry()); + assertEquals(rAGtool1.getQueryType(), rAGtool2.getQueryType()); + } + + @Test + public void testFactoryNeuralSparseQuery() { + RAGTool.Factory factoryMock = new RAGTool.Factory(); + RAGTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + factoryMock.init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + + String defaultDescription = factoryMock.getDefaultDescription(); + assertEquals(RAGTool.DEFAULT_DESCRIPTION, defaultDescription); + assertNotNull(RAGTool.Factory.getInstance()); + assertEquals(factoryMock.getDefaultType(), RAGTool.TYPE); + assertEquals(factoryMock.getDefaultVersion(), null); + + RAGTool rAGtool1 = factoryMock.create(params); 
+ NeuralSparseSearchTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); + NeuralSparseSearchTool queryTool = NeuralSparseSearchTool.Factory.getInstance().create(params); + RAGTool rAGtool2 = new RAGTool(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY, TEST_INFERENCE_MODEL_ID, true, queryTool); assertEquals(rAGtool1.getClient(), rAGtool2.getClient()); - assertEquals(rAGtool1.getK(), rAGtool2.getK()); assertEquals(rAGtool1.getInferenceModelId(), rAGtool2.getInferenceModelId()); assertEquals(rAGtool1.getName(), rAGtool2.getName()); - assertEquals(rAGtool1.getDocSize(), rAGtool2.getDocSize()); - assertEquals(rAGtool1.getIndex(), rAGtool2.getIndex()); - assertEquals(rAGtool1.getEmbeddingModelId(), rAGtool2.getEmbeddingModelId()); - assertEquals(rAGtool1.getEmbeddingField(), rAGtool2.getEmbeddingField()); - assertEquals(rAGtool1.getSourceFields(), rAGtool2.getSourceFields()); + assertEquals(rAGtool1.getQueryTool().getDocSize(), rAGtool2.getQueryTool().getDocSize()); + assertEquals(rAGtool1.getQueryTool().getIndex(), rAGtool2.getQueryTool().getIndex()); + assertEquals(rAGtool1.getQueryTool().getSourceFields(), rAGtool2.getQueryTool().getSourceFields()); assertEquals(rAGtool1.getXContentRegistry(), rAGtool2.getXContentRegistry()); + assertEquals(rAGtool1.getQueryType(), rAGtool2.getQueryType()); } - private static NamedXContentRegistry getNeuralQueryNamedXContentRegistry() { + private static NamedXContentRegistry getQueryNamedXContentRegistry() { QueryBuilder matchAllQueryBuilder = new MatchAllQueryBuilder(); List entries = new ArrayList<>(); - NamedXContentRegistry.Entry entry = new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField("neural"), (p, c) -> { - p.map(); - return matchAllQueryBuilder; - }); - entries.add(entry); + NamedXContentRegistry.Entry neural_query_entry = new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField("neural"), + (p, c) -> { + p.map(); + return matchAllQueryBuilder; + } + ); + 
entries.add(neural_query_entry); + NamedXContentRegistry.Entry neural_sparse_query_entry = new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField("neural_sparse"), + (p, c) -> { + p.map(); + return matchAllQueryBuilder; + } + ); + entries.add(neural_sparse_query_entry); NamedXContentRegistry mockNamedXContentRegistry = new NamedXContentRegistry(entries); return mockNamedXContentRegistry; } diff --git a/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java index cc67604f..cce80d5b 100644 --- a/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java @@ -48,7 +48,7 @@ public void testCreateTool() { assertEquals(TEST_K, tool.getK()); assertEquals("VectorDBTool", tool.getType()); assertEquals("VectorDBTool", tool.getName()); - assertEquals("Use this tool to search data in OpenSearch index.", VectorDBTool.Factory.getInstance().getDefaultDescription()); + assertEquals(VectorDBTool.DEFAULT_DESCRIPTION, VectorDBTool.Factory.getInstance().getDefaultDescription()); } @Test diff --git a/src/test/resources/org/opensearch/agent/tools/neural_sparse_tool_search_response.json b/src/test/resources/org/opensearch/agent/tools/neural_sparse_tool_search_response.json new file mode 100644 index 00000000..196e8a04 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/neural_sparse_tool_search_response.json @@ -0,0 +1,71 @@ +{ + "took" : 688, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 + }, + "hits" : { + "total" : { + "value" : 2, + "relation" : "eq" + }, + "max_score" : 30.0029, + "hits" : [ + { + "_index" : "my-nlp-index", + "_id" : "1", + "_score" : 30.0029, + "_source" : { + "passage_text" : "Hello world", + "passage_embedding" : { + "!" 
: 0.8708904, + "door" : 0.8587369, + "hi" : 2.3929274, + "worlds" : 2.7839446, + "yes" : 0.75845814, + "##world" : 2.5432441, + "born" : 0.2682308, + "nothing" : 0.8625516, + "goodbye" : 0.17146169, + "greeting" : 0.96817183, + "birth" : 1.2788506, + "come" : 0.1623208, + "global" : 0.4371151, + "it" : 0.42951578, + "life" : 1.5750692, + "thanks" : 0.26481047, + "world" : 4.7300377, + "tiny" : 0.5462298, + "earth" : 2.6555297, + "universe" : 2.0308156, + "worldwide" : 1.3903781, + "hello" : 6.696973, + "so" : 0.20279501, + "?" : 0.67785245 + }, + "id" : "s1" + } + }, + { + "_index" : "my-nlp-index", + "_id" : "2", + "_score" : 16.480486, + "_source" : { + "passage_text" : "Hi planet", + "passage_embedding" : { + "hi" : 4.338913, + "planets" : 2.7755864, + "planet" : 5.0969057, + "mars" : 1.7405145, + "earth" : 2.6087382, + "hello" : 3.3210192 + }, + "id" : "s2" + } + } + ] + } +} \ No newline at end of file From 2bd56edafac864f014a355aaf2ffb5acc319e502 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 26 Jan 2024 17:09:39 -0800 Subject: [PATCH 041/119] Fix SearchMonitorsTool bugs; add corresponding ITs (#151) (#153) * Fix search monitor bugs; add search monitor ITs * Remove unused fn * Clean up UT * Change to beforeEach * Fix detector_type bug --------- (cherry picked from commit 722bfd2e80f8cc922f282a44e8afebc3b69a8675) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../tools/SearchAnomalyDetectorsTool.java | 2 +- .../agent/tools/SearchMonitorsTool.java | 172 +++++++----------- .../SearchAnomalyDetectorsToolTests.java | 2 +- .../agent/tools/SearchMonitorsToolTests.java | 113 ++++++------ .../integTest/BaseAgentToolsIT.java | 7 + .../integTest/SearchMonitorsToolIT.java | 93 +++++++++- 6 files changed, 223 insertions(+), 166 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java 
b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index 2bbbcfa7..22c3d0db 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -245,7 +245,7 @@ private void processHits(Map hitsAsMap, ActionListener sb.append("{"); sb.append("id=").append(hit.getId()).append(","); sb.append("name=").append(hit.getSourceAsMap().get("name")).append(","); - sb.append("type=").append(hit.getSourceAsMap().get("type")).append(","); + sb.append("type=").append(hit.getSourceAsMap().get("detector_type")).append(","); sb.append("description=").append(hit.getSourceAsMap().get("description")).append(","); sb.append("index=").append(hit.getSourceAsMap().get("indices")).append(","); sb.append("lastUpdateTime=").append(hit.getSourceAsMap().get("last_update_time")); diff --git a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java index bbb9aaa7..304d598d 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java @@ -19,10 +19,7 @@ import org.opensearch.client.Client; import org.opensearch.client.node.NodeClient; import org.opensearch.commons.alerting.AlertingPluginInterface; -import org.opensearch.commons.alerting.action.GetMonitorRequest; -import org.opensearch.commons.alerting.action.GetMonitorResponse; import org.opensearch.commons.alerting.action.SearchMonitorRequest; -import org.opensearch.commons.alerting.model.Monitor; import org.opensearch.commons.alerting.model.ScheduledJob; import org.opensearch.core.action.ActionListener; import org.opensearch.index.query.BoolQueryBuilder; @@ -35,7 +32,6 @@ import org.opensearch.ml.common.spi.tools.Parser; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.ml.common.spi.tools.ToolAnnotation; -import 
org.opensearch.rest.RestRequest; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortOrder; @@ -102,89 +98,73 @@ public void run(Map parameters, ActionListener listener) ? Integer.parseInt(parameters.get("startIndex")) : 0; - // If a monitor ID is specified, all other params will be ignored. Simply return the monitor details based on that ID - // via the get monitor transport action + List mustList = new ArrayList(); if (monitorId != null) { - GetMonitorRequest getMonitorRequest = new GetMonitorRequest(monitorId, 1L, RestRequest.Method.GET, null); - ActionListener getMonitorListener = ActionListener.wrap(response -> { - Monitor monitor = response.getMonitor(); - processGetMonitorHit(monitor, listener); - }, e -> { - // System index isn't initialized by default, so ignore such errors. Alerting plugin does not return the - // standard IndexNotFoundException so we parse the message instead - if (e.getMessage().contains("Configured indices are not found")) { - processGetMonitorHit(null, listener); - } else { - log.error("Failed to get monitor.", e); - listener.onFailure(e); - } - }); - AlertingPluginInterface.INSTANCE.getMonitor((NodeClient) client, getMonitorRequest, getMonitorListener); - } else { - List mustList = new ArrayList(); - if (monitorName != null) { - mustList.add(new TermQueryBuilder("monitor.name.keyword", monitorName)); - } - if (monitorNamePattern != null) { - mustList.add(new WildcardQueryBuilder("monitor.name.keyword", monitorNamePattern)); - } - if (enabled != null) { - mustList.add(new TermQueryBuilder("monitor.enabled", enabled)); + mustList.add(new TermQueryBuilder("_id", monitorId)); + } + if (monitorName != null) { + mustList.add(new TermQueryBuilder("monitor.name.keyword", monitorName)); + } + if (monitorNamePattern != null) { + mustList.add(new WildcardQueryBuilder("monitor.name.keyword", monitorNamePattern)); + } + if (enabled != null) { + mustList.add(new 
TermQueryBuilder("monitor.enabled", enabled)); + } + if (hasTriggers != null) { + NestedQueryBuilder nestedTriggerQuery = new NestedQueryBuilder( + "monitor.triggers", + new ExistsQueryBuilder("monitor.triggers"), + ScoreMode.None + ); + + BoolQueryBuilder triggerQuery = new BoolQueryBuilder(); + if (hasTriggers) { + triggerQuery.must(nestedTriggerQuery); + } else { + triggerQuery.mustNot(nestedTriggerQuery); } - if (hasTriggers != null) { - NestedQueryBuilder nestedTriggerQuery = new NestedQueryBuilder( - "monitor.triggers", - new ExistsQueryBuilder("monitor.triggers"), - ScoreMode.None + mustList.add(triggerQuery); + } + if (indices != null) { + mustList + .add( + new NestedQueryBuilder( + "monitor.inputs", + new WildcardQueryBuilder("monitor.inputs.search.indices", indices), + ScoreMode.None + ) ); + } - BoolQueryBuilder triggerQuery = new BoolQueryBuilder(); - if (hasTriggers) { - triggerQuery.must(nestedTriggerQuery); - } else { - triggerQuery.mustNot(nestedTriggerQuery); - } - mustList.add(triggerQuery); - } - if (indices != null) { - mustList - .add( - new NestedQueryBuilder( - "monitor.inputs", - new WildcardQueryBuilder("monitor.inputs.search.indices", indices), - ScoreMode.None - ) - ); + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must().addAll(mustList); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(boolQueryBuilder) + .size(size) + .from(startIndex) + .sort(sortString, sortOrder); + + SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder).indices(ScheduledJob.SCHEDULED_JOBS_INDEX); + SearchMonitorRequest searchMonitorRequest = new SearchMonitorRequest(searchRequest); + + ActionListener searchMonitorListener = ActionListener.wrap(response -> { + List hits = Arrays.asList(response.getHits().getHits()); + Map hitsAsMap = hits.stream().collect(Collectors.toMap(SearchHit::getId, hit -> hit)); + processHits(hitsAsMap, listener); + + }, e -> { + // System index isn't 
initialized by default, so ignore such errors. Alerting plugin does not return the + // standard IndexNotFoundException so we parse the message instead + if (e.getMessage().contains("Configured indices are not found")) { + processHits(Collections.emptyMap(), listener); + } else { + log.error("Failed to search monitors.", e); + listener.onFailure(e); } + }); + AlertingPluginInterface.INSTANCE.searchMonitors((NodeClient) client, searchMonitorRequest, searchMonitorListener); - BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); - boolQueryBuilder.must().addAll(mustList); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .query(boolQueryBuilder) - .size(size) - .from(startIndex) - .sort(sortString, sortOrder); - - SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder).indices(ScheduledJob.SCHEDULED_JOBS_INDEX); - SearchMonitorRequest searchMonitorRequest = new SearchMonitorRequest(searchRequest); - - ActionListener searchMonitorListener = ActionListener.wrap(response -> { - List hits = Arrays.asList(response.getHits().getHits()); - Map hitsAsMap = hits.stream().collect(Collectors.toMap(SearchHit::getId, hit -> hit)); - processHits(hitsAsMap, listener); - - }, e -> { - // System index isn't initialized by default, so ignore such errors. 
Alerting plugin does not return the - // standard IndexNotFoundException so we parse the message instead - if (e.getMessage().contains("Configured indices are not found")) { - processHits(Collections.emptyMap(), listener); - } else { - log.error("Failed to search monitors.", e); - listener.onFailure(e); - } - }); - AlertingPluginInterface.INSTANCE.searchMonitors((NodeClient) client, searchMonitorRequest, searchMonitorListener); - } } @Override @@ -201,13 +181,14 @@ private void processHits(Map hitsAsMap, ActionListener StringBuilder sb = new StringBuilder(); sb.append("Monitors=["); for (SearchHit hit : hitsAsMap.values()) { + Map monitorAsMap = (Map) hit.getSourceAsMap().get("monitor"); sb.append("{"); sb.append("id=").append(hit.getId()).append(","); - sb.append("name=").append(hit.getSourceAsMap().get("name")).append(","); - sb.append("type=").append(hit.getSourceAsMap().get("type")).append(","); - sb.append("enabled=").append(hit.getSourceAsMap().get("enabled")).append(","); - sb.append("enabledTime=").append(hit.getSourceAsMap().get("enabled_time")).append(","); - sb.append("lastUpdateTime=").append(hit.getSourceAsMap().get("last_update_time")); + sb.append("name=").append(monitorAsMap.get("name")).append(","); + sb.append("type=").append(monitorAsMap.get("type")).append(","); + sb.append("enabled=").append(monitorAsMap.get("enabled")).append(","); + sb.append("enabledTime=").append(monitorAsMap.get("enabled_time")).append(","); + sb.append("lastUpdateTime=").append(monitorAsMap.get("last_update_time")); sb.append("}"); } sb.append("]"); @@ -215,25 +196,6 @@ private void processHits(Map hitsAsMap, ActionListener listener.onResponse((T) sb.toString()); } - private void processGetMonitorHit(Monitor monitor, ActionListener listener) { - StringBuilder sb = new StringBuilder(); - if (monitor != null) { - sb.append("Monitors=["); - sb.append("{"); - sb.append("id=").append(monitor.getId()).append(","); - sb.append("name=").append(monitor.getName()).append(","); - 
sb.append("type=").append(monitor.getType()).append(","); - sb.append("enabled=").append(monitor.getEnabled()).append(","); - sb.append("enabledTime=").append(monitor.getEnabledTime().toEpochMilli()).append(","); - sb.append("lastUpdateTime=").append(monitor.getLastUpdateTime().toEpochMilli()); - sb.append("}]"); - sb.append("TotalMonitors=1"); - } else { - sb.append("Monitors=[]TotalMonitors=0"); - } - listener.onResponse((T) sb.toString()); - } - /** * Factory for the {@link SearchMonitorsTool} */ diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index c4f0c293..13213f85 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -116,7 +116,7 @@ public void testRunWithSingleAnomalyDetector() throws Exception { XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); content.startObject(); content.field("name", testDetector.getName()); - content.field("type", testDetector.getDetectorType()); + content.field("detector_type", testDetector.getDetectorType()); content.field("description", testDetector.getDescription()); content.field("indices", testDetector.getIndices().get(0)); content.field("last_update_time", testDetector.getLastUpdateTime().toEpochMilli()); diff --git a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java index 72338fec..4c5a690f 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java @@ -35,7 +35,6 @@ import org.opensearch.client.IndicesAdminClient; import org.opensearch.client.node.NodeClient; import org.opensearch.common.xcontent.XContentType; -import 
org.opensearch.commons.alerting.action.GetMonitorResponse; import org.opensearch.commons.alerting.model.CronSchedule; import org.opensearch.commons.alerting.model.DataSources; import org.opensearch.commons.alerting.model.Monitor; @@ -96,29 +95,15 @@ public void setup() { @Test public void testRunWithNoMonitors() throws Exception { Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); - - SearchHit[] hits = new SearchHit[0]; - - TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); - - SearchResponse getMonitorsResponse = new SearchResponse( - new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), - null, - 0, - 0, - 0, - 0, - null, - null - ); - String expectedResponseStr = String.format("Monitors=[]TotalMonitors=%d", hits.length); + SearchResponse searchMonitorsResponse = getEmptySearchMonitorsResponse(); + String expectedResponseStr = "Monitors=[]TotalMonitors=0"; @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); doAnswer((invocation) -> { ActionListener responseListener = invocation.getArgument(2); - responseListener.onResponse(getMonitorsResponse); + responseListener.onResponse(searchMonitorsResponse); return null; }).when(nodeClient).execute(any(ActionType.class), any(), any()); @@ -132,21 +117,15 @@ public void testRunWithNoMonitors() throws Exception { public void testRunWithMonitorId() throws Exception { Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); - GetMonitorResponse getMonitorResponse = new GetMonitorResponse( - testMonitor.getId(), - 1L, - 2L, - 0L, - testMonitor, - Collections.emptyList() - ); + SearchResponse searchMonitorsResponse = getSearchMonitorsResponse(testMonitor); String expectedResponseStr = getExpectedResponseString(testMonitor); + @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); 
doAnswer((invocation) -> { - ActionListener responseListener = invocation.getArgument(2); - responseListener.onResponse(getMonitorResponse); + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(searchMonitorsResponse); return null; }).when(nodeClient).execute(any(ActionType.class), any(), any()); @@ -160,15 +139,15 @@ public void testRunWithMonitorId() throws Exception { public void testRunWithMonitorIdNotFound() throws Exception { Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); - GetMonitorResponse responseWithNullMonitor = new GetMonitorResponse(testMonitor.getId(), 1L, 2L, 0L, null, Collections.emptyList()); - String expectedResponseStr = String.format("Monitors=[]TotalMonitors=0"); + SearchResponse searchMonitorsResponse = getEmptySearchMonitorsResponse(); + String expectedResponseStr = "Monitors=[]TotalMonitors=0"; @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); doAnswer((invocation) -> { - ActionListener responseListener = invocation.getArgument(2); - responseListener.onResponse(responseWithNullMonitor); + ActionListener responseListener = invocation.getArgument(2); + responseListener.onResponse(searchMonitorsResponse); return null; }).when(nodeClient).execute(any(ActionType.class), any(), any()); @@ -182,29 +161,7 @@ public void testRunWithMonitorIdNotFound() throws Exception { public void testRunWithSingleMonitor() throws Exception { Tool tool = SearchMonitorsTool.Factory.getInstance().create(Collections.emptyMap()); - XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); - content.startObject(); - content.field("name", testMonitor.getName()); - content.field("type", testMonitor.getType()); - content.field("enabled", Boolean.toString(testMonitor.getEnabled())); - content.field("enabled_time", Long.toString(testMonitor.getEnabledTime().toEpochMilli())); - content.field("last_update_time", 
Long.toString(testMonitor.getLastUpdateTime().toEpochMilli())); - content.endObject(); - SearchHit[] hits = new SearchHit[1]; - hits[0] = new SearchHit(0, testMonitor.getId(), null, null).sourceRef(BytesReference.bytes(content)); - - TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); - - SearchResponse getMonitorsResponse = new SearchResponse( - new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), - null, - 0, - 0, - 0, - 0, - null, - null - ); + SearchResponse searchMonitorsResponse = getSearchMonitorsResponse(testMonitor); String expectedResponseStr = getExpectedResponseString(testMonitor); @SuppressWarnings("unchecked") @@ -212,7 +169,7 @@ public void testRunWithSingleMonitor() throws Exception { doAnswer((invocation) -> { ActionListener responseListener = invocation.getArgument(2); - responseListener.onResponse(getMonitorsResponse); + responseListener.onResponse(searchMonitorsResponse); return null; }).when(nodeClient).execute(any(ActionType.class), any(), any()); @@ -254,6 +211,50 @@ public void testValidate() { assertTrue(tool.validate(nullParams)); } + private SearchResponse getSearchMonitorsResponse(Monitor monitor) throws Exception { + XContentBuilder content = XContentBuilder.builder(XContentType.JSON.xContent()); + content + .startObject() + .startObject("monitor") + .field("name", monitor.getName()) + .field("type", monitor.getType()) + .field("enabled", Boolean.toString(monitor.getEnabled())) + .field("enabled_time", Long.toString(monitor.getEnabledTime().toEpochMilli())) + .field("last_update_time", Long.toString(monitor.getLastUpdateTime().toEpochMilli())) + .endObject() + .endObject(); + SearchHit[] hits = new SearchHit[1]; + hits[0] = new SearchHit(0, monitor.getId(), null, null).sourceRef(BytesReference.bytes(content)); + + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + + return new SearchResponse( + new 
SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + } + + private SearchResponse getEmptySearchMonitorsResponse() throws Exception { + SearchHit[] hits = new SearchHit[0]; + TotalHits totalHits = new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO); + return new SearchResponse( + new SearchResponseSections(new SearchHits(hits, totalHits, 0), new Aggregations(new ArrayList<>()), null, false, null, null, 0), + null, + 0, + 0, + 0, + 0, + null, + null + ); + } + private String getExpectedResponseString(Monitor testMonitor) { return String .format( diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index 993e2b08..21b2a8d4 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -115,6 +115,13 @@ protected String deployModel(String modelId) { return parseFieldFromResponse(response, MLTask.TASK_ID_FIELD).toString(); } + protected String indexMonitor(String monitorAsJsonString) { + Response response = makeRequest(client(), "POST", "_plugins/_alerting/monitors", null, monitorAsJsonString, null); + + assertEquals(RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + return parseFieldFromResponse(response, "_id").toString(); + } + @SneakyThrows protected Map waitTaskComplete(String taskId) { for (int i = 0; i < MAX_TASK_RESULT_QUERY_TIME_IN_SECOND; i++) { diff --git a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java index c94bf8e8..2ed9c726 100644 --- a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java @@ -9,16 +9,23 @@ import java.nio.file.Files; import java.nio.file.Path; +import 
java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.json.JSONObject; import org.junit.After; import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import lombok.SneakyThrows; +import lombok.extern.log4j.Log4j2; +@Log4j2 public class SearchMonitorsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; - private static final String monitorId = "foo-id"; private static final String monitorName = "foo-name"; + private static final String monitorName2 = "bar-name"; @Before @SneakyThrows @@ -37,6 +44,12 @@ public void setUp() { ); } + @BeforeEach + @SneakyThrows + public void prepareTest() { + deleteSystemIndices(); + } + @After @SneakyThrows public void tearDown() { @@ -47,12 +60,86 @@ public void tearDown() { @SneakyThrows public void testSearchMonitorsToolInFlowAgent_withNoSystemIndex() { - deleteSystemIndices(); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorName\": \"" + monitorName + "\"}}"; String result = executeAgent(agentId, agentInput); assertEquals("Monitors=[]TotalMonitors=0", result); } - // TODO: Add IT to test against sample monitor data + @SneakyThrows + public void testSearchMonitorsToolInFlowAgent_searchById() { + String monitorId = indexMonitor(getMonitorJsonString(monitorName, true)); + + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "\"}}"; + + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains(String.format("name=%s", monitorName))); + assertTrue(result.contains("TotalMonitors=1")); + } + + @SneakyThrows + public void testSearchMonitorsToolInFlowAgent_singleMonitor_noFilter() { + indexMonitor(getMonitorJsonString(monitorName, true)); + + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{}}"; + String result = executeAgent(agentId, agentInput); + 
assertTrue(result.contains(String.format("name=%s", monitorName))); + assertTrue(result.contains("TotalMonitors=1")); + } + + @SneakyThrows + public void testSearchMonitorsToolInFlowAgent_singleMonitor_filter() { + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorId\": \"" + "foo-id" + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains("TotalMonitors=0")); + } + + @SneakyThrows + public void testSearchMonitorsToolInFlowAgent_multipleMonitors_noFilter() { + indexMonitor(getMonitorJsonString(monitorName, true)); + indexMonitor(getMonitorJsonString(monitorName2, false)); + + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains(String.format("name=%s", monitorName))); + assertTrue(result.contains(String.format("name=%s", monitorName2))); + assertTrue(result.contains("enabled=true")); + assertTrue(result.contains("enabled=false")); + assertTrue(result.contains("TotalMonitors=2")); + } + + @SneakyThrows + public void testSearchMonitorsToolInFlowAgent_multipleMonitors_filter() { + indexMonitor(getMonitorJsonString(monitorName, true)); + indexMonitor(getMonitorJsonString(monitorName2, false)); + + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorName\": \"" + monitorName + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains(String.format("name=%s", monitorName))); + assertFalse(result.contains(String.format("name=%s", monitorName2))); + assertTrue(result.contains("enabled=true")); + assertTrue(result.contains("TotalMonitors=1")); + } + + // Helper fn to create the JSON string to use in a REST request body when indexing a monitor + private String getMonitorJsonString(String monitorName, boolean enabled) { + JSONObject jsonObj = new JSONObject(); + 
jsonObj.put("type", "monitor"); + jsonObj.put("name", monitorName); + jsonObj.put("enabled", String.valueOf(enabled)); + jsonObj.put("inputs", Collections.emptyList()); + jsonObj.put("triggers", Collections.emptyList()); + Map scheduleMap = new HashMap(); + Map periodMap = new HashMap(); + periodMap.put("interval", 5); + periodMap.put("unit", "MINUTES"); + scheduleMap.put("period", periodMap); + jsonObj.put("schedule", scheduleMap); + return jsonObj.toString(); + } } From 53932ebacd23c20b5817cb7012bb556d96069287 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 13:33:17 -0800 Subject: [PATCH 042/119] Tune descriptions; improve monitor type (#154) (#155) (cherry picked from commit 1626af1a5d5b42745f3769709c3c5c5f2c837743) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../java/org/opensearch/agent/tools/SearchAlertsTool.java | 2 +- .../opensearch/agent/tools/SearchAnomalyDetectorsTool.java | 2 +- .../org/opensearch/agent/tools/SearchAnomalyResultsTool.java | 2 +- .../java/org/opensearch/agent/tools/SearchMonitorsTool.java | 4 ++-- .../org/opensearch/agent/tools/SearchMonitorsToolTests.java | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java index b27d0887..5abd6121 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java @@ -33,7 +33,7 @@ public class SearchAlertsTool implements Tool { public static final String TYPE = "SearchAlertsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that finds alert trigger information. 
It takes 12 optional argument named sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is monitor_name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and searchString which defines the search string to use for searching a specific alert (default is an empty String), and severityLevel which defines the severity level to filter for (default is ALL), and alertState which defines the alert state to filter for (default is ALL), and monitorId which defines the monitor ID to filter for, and alertIndex which defines the alert index to search from (default is null), and monitorIds which defines the list of monitor IDs to filter for, and workflowIds which defines the list of workflow IDs to filter for(default is null), and alertIds which defines the list of alert IDs to filter for (default is null). The tool returns 2 values: a list of alerts (each containining id, version, schema version, monitor ID, workflow ID, workflow name, monitor name, monitor version, monitor user, trigger ID, trigger name, finding IDs, related doc IDs, state, start time, end time, last notifcation time, acknowledged time, error message, error history, severity, action execution results, aggregation result bucket, execution ID, associated alert IDs), and the total number of alerts."; + "This is a tool that finds alerts. 
It takes 12 optional argument named sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is monitor_name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0), and searchString which defines the search string to use for searching a specific alert (default is an empty String), and severityLevel which defines the severity level to filter for as an integer (default is ALL), and alertState which defines the alert state to filter for (options are ALL, ACTIVE, ERROR, COMPLETED, or ACKNOWLEDGED, default is ALL), and monitorId which defines the associated monitor ID to filter for, and alertIndex which defines the alert index to search from (default is null), and monitorIds which defines the list of monitor IDs to filter for, and workflowIds which defines the list of workflow IDs to filter for(default is null), and alertIds which defines the list of alert IDs to filter for (default is null). 
The tool returns 2 values: a list of alerts (each containining the alert id, version, schema version, monitor ID, workflow ID, workflow name, monitor name, monitor version, monitor user, trigger ID, trigger name, finding IDs, related doc IDs, state, start time in epoch milliseconds, end time in epoch milliseconds, last notification time in epoch milliseconds, acknowledged time in epoch milliseconds, error message, error history, severity, action execution results, aggregation result bucket, execution ID, associated alert IDs), and the total number of alerts."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index 22c3d0db..26e84a4b 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -49,7 +49,7 @@ public class SearchAnomalyDetectorsTool implements Tool { public static final String TYPE = "SearchAnomalyDetectorsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the monitor (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index being detected (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (default is null), and lastUpdateTime which defines the latest update time of the anomaly detector (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). The tool returns 2 values: a list of anomaly detectors (each containing the id, name, type, description, index, last update time), and the total number of anomaly results."; + "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the monitor (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index being detected (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (synonymous with multi-entity) of non-high-cardinality (synonymous with single-entity) (default is null, indicating both), and lastUpdateTime which defines the latest update time of the anomaly detector in epoch milliseconds (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). 
The tool returns 2 values: a list of anomaly detectors (each containing the detector id, detector name, detector type indicating multi-entity or single-entity (where multi-entity also means high-cardinality), detector description, name of the configured index, last update time in epoch milliseconds), and the total number of anomaly detectors."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index 701c707c..832c6297 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -40,7 +40,7 @@ public class SearchAnomalyResultsTool implements Tool { public static final String TYPE = "SearchAnomalyResultsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches anomaly results. It takes 9 arguments named detectorId which defines the detector ID to filter for (default is null), and realtime which defines whether the anomaly is real time, and anomalyGradeThreshold which defines the threshold for anomaly grade (a number between 0 and 1 that indicates how anomalous a data point is) (default is 0), and dataStartTime which defines the start time of the anomaly query (default is null), and dataEndTime which defines the end time of the anomaly query (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is desc), and sortString which which defines how to sort the results (default is data_start_time), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). 
The tool returns 2 values: a list of anomaly results (where each result contains the detector ID, the anomaly grade, and the confidence), and the total number of anomaly results."; + "This is a tool that searches anomaly results. It takes 9 arguments named detectorId which defines the detector ID to filter for (default is null), and realtime which defines whether the anomaly results are from a realtime detector (set to false to only get results from historical analyses) (default is null), and anomalyGradeThreshold which defines the threshold for anomaly grade (a number between 0 and 1 that indicates how anomalous a data point is) (default is greater than 0), and dataStartTime which defines the start time of the anomaly data in epoch milliseconds (default is null), and dataEndTime which defines the end time of the anomaly data in epoch milliseconds (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is desc), and sortString which defines how to sort the results (default is data_start_time), and size which defines the number of anomalies to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0). The tool returns 2 values: a list of anomaly results (where each result contains the detector ID, the anomaly grade, and the confidence), and the total number of anomaly results."; @Setter @Getter diff --git a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java index 304d598d..433994cf 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchMonitorsTool.java @@ -45,7 +45,7 @@ public class SearchMonitorsTool implements Tool { public static final String TYPE = "SearchMonitorsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches alerting monitors. 
It takes 10 optional arguments named monitorId which defines the monitor ID to filter for (default is null), and monitorName which defines explicit name of the monitor (default is null), and monitorNamePattern which is a wildcard query to match detector name (default is null), and enabled which defines whether the monitor is enabled (default is null, indicating both), and hasTriggers which defines whether the monitor has triggers enabled (default is null, indicating both), and indices which defines the index being monitored (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the index to start from (default is 0). The tool returns 2 values: a list of alerting monitors (each containining ID, name, type, enabled, enabled time, last update time), and the total number of monitors."; + "This is a tool that searches alerting monitors. 
It takes 10 optional arguments named monitorId which defines the monitor ID to filter for (default is null), and monitorName which defines the explicit name of the monitor (default is null), and monitorNamePattern which is a wildcard query to match monitor name (default is null), and enabled which defines whether the monitor is enabled (default is null, indicating both enabled and disabled), and hasTriggers which defines whether the monitor has triggers enabled (default is null, indicating both), and indices which defines the index being monitored (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0). The tool returns 2 values: a list of alerting monitors (each containing monitor ID, monitor name, monitor type (indicating query-level, document-level, or bucket-level monitor types), enabled, enabled time in epoch milliseconds, last update time in epoch milliseconds), and the total number of alerting monitors."; @Setter @Getter private String name = TYPE; @@ -185,7 +185,7 @@ private void processHits(Map hitsAsMap, ActionListener sb.append("{"); sb.append("id=").append(hit.getId()).append(","); sb.append("name=").append(monitorAsMap.get("name")).append(","); - sb.append("type=").append(monitorAsMap.get("type")).append(","); + sb.append("type=").append(monitorAsMap.get("monitor_type")).append(","); sb.append("enabled=").append(monitorAsMap.get("enabled")).append(","); sb.append("enabledTime=").append(monitorAsMap.get("enabled_time")).append(","); sb.append("lastUpdateTime=").append(monitorAsMap.get("last_update_time")); diff --git a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java
index 4c5a690f..00bfd4d1 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java @@ -217,7 +217,7 @@ private SearchResponse getSearchMonitorsResponse(Monitor monitor) throws Excepti .startObject() .startObject("monitor") .field("name", monitor.getName()) - .field("type", monitor.getType()) + .field("monitor_type", monitor.getType()) .field("enabled", Boolean.toString(monitor.getEnabled())) .field("enabled_time", Long.toString(monitor.getEnabledTime().toEpochMilli())) .field("last_update_time", Long.toString(monitor.getLastUpdateTime().toEpochMilli())) From 7ac1fb5b6a4292fb9cc6b37031aaba7a14bd4551 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 14:22:58 +0800 Subject: [PATCH 043/119] Fix boolean parse (#157) (#158) * fix boolean parse * fix boolean parse --------- (cherry picked from commit b8229129bbd12bdbd85ce7d5568f86f2132230d0) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index c008d523..09fe1d3a 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -261,7 +261,7 @@ public PPLTool create(Map map) { (String) map.get("model_id"), (String) map.getOrDefault("prompt", ""), (String) map.getOrDefault("model_type", ""), - (boolean) map.getOrDefault("execute", true) + Boolean.valueOf((String) map.getOrDefault("execute", "true")) ); } From 4d88e09d8919ea83cc7972684f697ec740b912f5 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Wed, 31 Jan 2024 10:54:04 +0800 Subject: [PATCH 044/119] Onboarding skills (#163) * Fix 
onboarding skills repo issues (#156) Signed-off-by: zane-neo * change publish workflow (#159) Signed-off-by: zane-neo * Change jdk version Signed-off-by: zane-neo --------- Signed-off-by: zane-neo --- .github/workflows/maven-publish.yml | 4 ++-- build.gradle | 7 ++++--- settings.gradle | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml index 9786fa52..fa6a71f6 100644 --- a/.github/workflows/maven-publish.yml +++ b/.github/workflows/maven-publish.yml @@ -22,7 +22,7 @@ jobs: - uses: actions/setup-java@v3 with: distribution: temurin # Temurin is a distribution of adoptium - java-version: 17 + java-version: 21 - uses: actions/checkout@v3 - uses: aws-actions/configure-aws-credentials@v1 with: @@ -34,4 +34,4 @@ jobs: export SONATYPE_PASSWORD=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-password --query SecretString --output text) echo "::add-mask::$SONATYPE_USERNAME" echo "::add-mask::$SONATYPE_PASSWORD" - ./gradlew publishShadowPublicationToSnapshotsRepository + ./gradlew publishPluginZipPublicationToSnapshotsRepository diff --git a/build.gradle b/build.gradle index 9d857f9c..9f1f09cb 100644 --- a/build.gradle +++ b/build.gradle @@ -17,9 +17,9 @@ buildscript { repositories { mavenLocal() - mavenCentral() - maven { url "https://plugins.gradle.org/m2/" } maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } + maven { url "https://plugins.gradle.org/m2/" } + mavenCentral() } dependencies { @@ -45,7 +45,7 @@ repositories { } allprojects { - group 'org.opensearch.plugin' + group 'org.opensearch' version = opensearch_version.tokenize('-')[0] + '.0' if (buildVersionQualifier) { version += "-${buildVersionQualifier}" @@ -176,6 +176,7 @@ dependencyLicenses.enabled = false loggerUsageCheck.enabled = false testingConventions.enabled = false thirdPartyAudit.enabled = false +publishNebulaPublicationToMavenLocal.enabled = false test { testLogging { 
diff --git a/settings.gradle b/settings.gradle index 26eac06a..bea9b83a 100644 --- a/settings.gradle +++ b/settings.gradle @@ -3,4 +3,4 @@ * SPDX-License-Identifier: Apache-2.0 */ -rootProject.name = 'agent-tools' +rootProject.name = 'opensearch-skills' From 55ef7beeaa7e1a142746824fcc71b2af8718d29f Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 13:28:52 +0800 Subject: [PATCH 045/119] Fix the flaky test due to m_l_limit_exceeded_exception (#150) (#164) * increase the CB threshold, delete model after test * add log * add wait time * enhancement: wait model undeploy before delete; refactor the wait response logic * modify ci yml --------- (cherry picked from commit 0791c34f12f3192d104046842ce70914e7acf7c7) Signed-off-by: zhichao-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .github/workflows/ci.yml | 12 ++--- .../integTest/BaseAgentToolsIT.java | 52 +++++++++++++++---- .../integTest/NeuralSparseSearchToolIT.java | 1 + .../integTest/ToolIntegrationTest.java | 5 ++ 4 files changed, 51 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 87f9899c..aa65ae37 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,10 +35,7 @@ jobs: needs: Get-CI-Image-Tag strategy: matrix: - java: - - 11 - - 17 - - 21.0.1 + java: [11, 17, 21] name: Build and Test skills plugin on Linux runs-on: ubuntu-latest container: @@ -71,7 +68,7 @@ jobs: build-MacOS: strategy: matrix: - java: [ 11, 17 ] + java: [11, 17, 21] name: Build and Test skills Plugin on MacOS needs: Get-CI-Image-Tag @@ -95,10 +92,7 @@ jobs: build-windows: strategy: matrix: - java: - - 11 - - 17 - - 21.0.1 + java: [11, 17, 21] name: Build and Test skills plugin on Windows needs: Get-CI-Image-Tag runs-on: windows-latest diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java 
b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index 21b2a8d4..38d43377 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -10,6 +10,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Predicate; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; @@ -35,6 +36,7 @@ import org.opensearch.ml.common.MLTask; import org.opensearch.ml.common.MLTaskState; import org.opensearch.ml.common.input.execute.agent.AgentMLInput; +import org.opensearch.ml.common.model.MLModelState; import org.opensearch.ml.common.output.model.ModelTensor; import org.opensearch.ml.common.output.model.ModelTensorOutput; import org.opensearch.ml.common.output.model.ModelTensors; @@ -57,6 +59,7 @@ public void updateClusterSettings() { updateClusterSettings("plugins.ml_commons.only_run_on_ml_node", false); // default threshold for native circuit breaker is 90, it may be not enough on test runner machine updateClusterSettings("plugins.ml_commons.native_memory_threshold", 100); + updateClusterSettings("plugins.ml_commons.jvm_heap_memory_threshold", 100); updateClusterSettings("plugins.ml_commons.allow_registering_model_via_url", true); } @@ -123,26 +126,35 @@ protected String indexMonitor(String monitorAsJsonString) { } @SneakyThrows - protected Map waitTaskComplete(String taskId) { + protected Map waitResponseMeetingCondition( + String method, + String endpoint, + String jsonEntity, + Predicate> condition + ) { for (int i = 0; i < MAX_TASK_RESULT_QUERY_TIME_IN_SECOND; i++) { - Response response = makeRequest(client(), "GET", "/_plugins/_ml/tasks/" + taskId, null, (String) null, null); + Response response = makeRequest(client(), method, endpoint, null, jsonEntity, null); assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); Map responseInMap = parseResponseToMap(response); - String 
state = responseInMap.get(MLTask.STATE_FIELD).toString(); - if (state.equals(MLTaskState.COMPLETED.toString())) { + if (condition.test(responseInMap)) { return responseInMap; } - if (state.equals(MLTaskState.FAILED.toString()) - || state.equals(MLTaskState.CANCELLED.toString()) - || state.equals(MLTaskState.COMPLETED_WITH_ERROR.toString())) { - fail("The task failed with state " + state); - } + logger.info("The " + i + "-th response: " + responseInMap.toString()); Thread.sleep(DEFAULT_TASK_RESULT_QUERY_INTERVAL_IN_MILLISECOND); } - fail("The task failed to complete after " + MAX_TASK_RESULT_QUERY_TIME_IN_SECOND + " seconds."); + fail("The response failed to meet condition after " + MAX_TASK_RESULT_QUERY_TIME_IN_SECOND + " seconds."); return null; } + @SneakyThrows + protected Map waitTaskComplete(String taskId) { + Predicate> condition = responseInMap -> { + String state = responseInMap.get(MLTask.STATE_FIELD).toString(); + return state.equals(MLTaskState.COMPLETED.toString()); + }; + return waitResponseMeetingCondition("GET", "/_plugins/_ml/tasks/" + taskId, (String) null, condition); + } + // Register the model then deploy it. 
Returns the model_id until the model is deployed protected String registerModelThenDeploy(String requestBody) { String registerModelTaskId = registerModel(requestBody); @@ -153,6 +165,26 @@ protected String registerModelThenDeploy(String requestBody) { return modelId; } + @SneakyThrows + private void waitModelUndeployed(String modelId) { + Predicate> condition = responseInMap -> { + String state = responseInMap.get(MLModel.MODEL_STATE_FIELD).toString(); + return !state.equals(MLModelState.DEPLOYED.toString()) + && !state.equals(MLModelState.DEPLOYING.toString()) + && !state.equals(MLModelState.PARTIALLY_DEPLOYED.toString()); + }; + waitResponseMeetingCondition("GET", "/_plugins/_ml/models/" + modelId, (String) null, condition); + return; + } + + @SneakyThrows + protected void deleteModel(String modelId) { + // need to undeploy first as model can be in use + makeRequest(client(), "POST", "/_plugins/_ml/models/" + modelId + "/_undeploy", null, (String) null, null); + waitModelUndeployed(modelId); + makeRequest(client(), "DELETE", "/_plugins/_ml/models/" + modelId, null, (String) null, null); + } + protected void createIndexWithConfiguration(String indexName, String indexConfiguration) throws Exception { Response response = makeRequest(client(), "PUT", indexName, null, indexConfiguration, null); Map responseInMap = parseResponseToMap(response); diff --git a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java index 2dda2095..58431a0a 100644 --- a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java +++ b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java @@ -88,6 +88,7 @@ public void setUp() { public void tearDown() { super.tearDown(); deleteExternalIndices(); + deleteModel(modelId); } public void testNeuralSparseSearchToolInFlowAgent() { diff --git a/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java 
b/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java index aba39573..74c3d5cf 100644 --- a/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java +++ b/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java @@ -68,6 +68,11 @@ public void stopMockLLM() { server.stop(1); } + @After + public void deleteModel() { + deleteModel(modelId); + } + private String setUpConnector() { String url = String.format(Locale.ROOT, "http://127.0.0.1:%d/invoke", server.getAddress().getPort()); return createConnector( From eb70c182089fa4800b43c8ba9456e9d52f351060 Mon Sep 17 00:00:00 2001 From: Peter Zhu Date: Wed, 31 Jan 2024 16:33:04 -0500 Subject: [PATCH 046/119] Change 2.x maven-publish repo name to be skills and sync with main branch (#166) Signed-off-by: Peter Zhu --- .github/workflows/maven-publish.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml index fa6a71f6..d07e5b31 100644 --- a/.github/workflows/maven-publish.yml +++ b/.github/workflows/maven-publish.yml @@ -11,7 +11,7 @@ jobs: build-and-publish-snapshots: strategy: fail-fast: false - if: github.repository == 'opensearch-project/agent-tools' + if: github.repository == 'opensearch-project/skills' runs-on: ubuntu-latest permissions: @@ -24,7 +24,7 @@ jobs: distribution: temurin # Temurin is a distribution of adoptium java-version: 21 - uses: actions/checkout@v3 - - uses: aws-actions/configure-aws-credentials@v1 + - uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }} aws-region: us-east-1 From bc56d78337c873253e80e5b66dfc4d7d1beeb871 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 15:23:15 -0800 Subject: [PATCH 047/119] Improve IT coverage of all AD & Alerting tools (#165) (#167) * Add search alert IT cases * Add more IT * Turn off returning ad job 
* Add AD results IT; clean up constants * Use sample monitor for ingestion --------- (cherry picked from commit 21e964c61d6bfd2eeae1046d48ed62b8520f63e1) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../tools/SearchAnomalyDetectorsTool.java | 4 +- .../agent/tools/utils/ToolConstants.java | 3 + .../integTest/BaseAgentToolsIT.java | 13 + .../integTest/SearchAlertsToolIT.java | 139 +- .../SearchAnomalyDetectorsToolIT.java | 94 +- .../integTest/SearchAnomalyResultsToolIT.java | 93 +- .../integTest/SearchMonitorsToolIT.java | 69 +- .../tools/alerting/alert_index_mappings.json | 173 +++ .../alerting_config_index_mappings.json | 1269 +++++++++++++++++ ...nt_of_search_alerts_tool_request_body.json | 0 ..._of_search_monitors_tool_request_body.json | 0 .../agent/tools/alerting/sample_alert.json | 23 + .../agent/tools/alerting/sample_monitor.json | 15 + .../detectors_index_mappings.json | 157 ++ ..._anomaly_detectors_tool_request_body.json} | 0 ...rch_anomaly_results_tool_request_body.json | 0 .../results_index_mappings.json | 161 +++ .../anomaly-detection/sample_detector.json | 48 + .../sample_index_mappings.json | 12 + .../anomaly-detection/sample_result.json | 19 + 20 files changed, 2177 insertions(+), 115 deletions(-) create mode 100644 src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json create mode 100644 src/test/resources/org/opensearch/agent/tools/alerting/alerting_config_index_mappings.json rename src/test/resources/org/opensearch/agent/tools/{ => alerting}/register_flow_agent_of_search_alerts_tool_request_body.json (100%) rename src/test/resources/org/opensearch/agent/tools/{ => alerting}/register_flow_agent_of_search_monitors_tool_request_body.json (100%) create mode 100644 src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json create mode 100644 src/test/resources/org/opensearch/agent/tools/alerting/sample_monitor.json create mode 100644 
src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json rename src/test/resources/org/opensearch/agent/tools/{register_flow_agent_of_search_detectors_tool_request_body.json => anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json} (100%) rename src/test/resources/org/opensearch/agent/tools/{ => anomaly-detection}/register_flow_agent_of_search_anomaly_results_tool_request_body.json (100%) create mode 100644 src/test/resources/org/opensearch/agent/tools/anomaly-detection/results_index_mappings.json create mode 100644 src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_detector.json create mode 100644 src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_index_mappings.json create mode 100644 src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_result.json diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index 26e84a4b..a94b92f6 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -49,7 +49,7 @@ public class SearchAnomalyDetectorsTool implements Tool { public static final String TYPE = "SearchAnomalyDetectorsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the monitor (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index being detected (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (synonymous with multi-entity) of non-high-cardinality (synonymous with single-entity) (default is null, indicating both), and lastUpdateTime which defines the latest update time of the anomaly detector in epoch milliseconds (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). The tool returns 2 values: a list of anomaly detectors (each containing the detector id, detector name, detector type indicating multi-entity or single-entity (where multi-entity also means high-cardinality), detector description, name of the configured index, last update time in epoch milliseconds), and the total number of anomaly detectors."; + "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the detector (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index or index pattern the detector is detecting over (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (synonymous with multi-entity) or non-high-cardinality (synonymous with single-entity) (default is null, indicating both), and lastUpdateTime which defines the latest update time of the anomaly detector in epoch milliseconds (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). 
The tool returns 2 values: a list of anomaly detectors (each containing the detector id, detector name, detector type indicating multi-entity or single-entity (where multi-entity also means high-cardinality), detector description, name of the configured index, last update time in epoch milliseconds), and the total number of anomaly detectors."; @Setter @Getter @@ -160,7 +160,7 @@ public void run(Map parameters, ActionListener listener) GetAnomalyDetectorRequest profileRequest = new GetAnomalyDetectorRequest( hit.getId(), Versions.MATCH_ANY, - true, + false, true, "", "", diff --git a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java index e6d95afe..2a90ec7e 100644 --- a/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java +++ b/src/main/java/org/opensearch/agent/tools/utils/ToolConstants.java @@ -20,7 +20,10 @@ public static enum DetectorStateString { // System indices constants are not cleanly exposed from the AD & Alerting plugins, so we persist our // own constants here. 
public static final String AD_RESULTS_INDEX_PATTERN = ".opendistro-anomaly-results*"; + public static final String AD_RESULTS_INDEX = ".opendistro-anomaly-results"; public static final String AD_DETECTORS_INDEX = ".opendistro-anomaly-detectors"; public static final String ALERTING_CONFIG_INDEX = ".opendistro-alerting-config"; + public static final String ALERTING_ALERTS_INDEX = ".opendistro-alerting-alerts"; + } diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index 38d43377..2fdbe6ab 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -125,6 +125,13 @@ protected String indexMonitor(String monitorAsJsonString) { return parseFieldFromResponse(response, "_id").toString(); } + protected String indexDetector(String detectorAsJsonString) { + Response response = makeRequest(client(), "POST", "_plugins/_anomaly_detection/detectors", null, detectorAsJsonString, null); + + assertEquals(RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + return parseFieldFromResponse(response, "_id").toString(); + } + @SneakyThrows protected Map waitResponseMeetingCondition( String method, @@ -242,6 +249,12 @@ protected void addDocToIndex(String indexName, String docId, List fieldN assertEquals(RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); } + @SneakyThrows + protected void addDocToIndex(String indexName, String docId, String contents) { + Response response = makeRequest(client(), "POST", "/" + indexName + "/_doc/" + docId + "?refresh=true", null, contents, null); + assertEquals(RestStatus.CREATED, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + public String createAgent(String requestBody) { Response response = makeRequest(client(), "POST", "/_plugins/_ml/agents/_register", null, requestBody, null); 
assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); diff --git a/src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java index 66e8f233..95872d22 100644 --- a/src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAlertsToolIT.java @@ -10,29 +10,43 @@ import org.junit.After; import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; +import org.opensearch.agent.tools.utils.ToolConstants; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; import lombok.SneakyThrows; public class SearchAlertsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; + private String alertsIndexMappings; + private String alertingConfigIndexMappings; + private String sampleAlert; private static final String monitorId = "foo-id"; private static final String monitorName = "foo-name"; + private static final String registerAgentFilepath = + "org/opensearch/agent/tools/alerting/register_flow_agent_of_search_alerts_tool_request_body.json"; + private static final String alertsIndexMappingsFilepath = "org/opensearch/agent/tools/alerting/alert_index_mappings.json"; + private static final String alertingConfigIndexMappingsFilepath = + "org/opensearch/agent/tools/alerting/alerting_config_index_mappings.json"; + private static final String sampleAlertFilepath = "org/opensearch/agent/tools/alerting/sample_alert.json"; @Before @SneakyThrows public void setUp() { super.setUp(); - registerAgentRequestBody = Files - .readString( - Path - .of( - this - .getClass() - .getClassLoader() - .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json") - .toURI() - ) - ); + registerAgentRequestBody = Files.readString(Path.of(this.getClass().getClassLoader().getResource(registerAgentFilepath).toURI())); + alertsIndexMappings = 
Files.readString(Path.of(this.getClass().getClassLoader().getResource(alertsIndexMappingsFilepath).toURI())); + alertingConfigIndexMappings = Files + .readString(Path.of(this.getClass().getClassLoader().getResource(alertingConfigIndexMappingsFilepath).toURI())); + sampleAlert = Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleAlertFilepath).toURI())); + } + + @BeforeEach + @SneakyThrows + public void prepareTest() { + deleteSystemIndices(); } @After @@ -45,13 +59,108 @@ public void tearDown() { @SneakyThrows public void testSearchAlertsToolInFlowAgent_withNoSystemIndex() { - deleteSystemIndices(); String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "\"}}"; + String agentInput = "{\"parameters\":{}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("Alerts=[]TotalAlerts=0", result); + } + + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_withSystemIndex() { + setupAlertingSystemIndices(); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{}}"; String result = executeAgent(agentId, agentInput); assertEquals("Alerts=[]TotalAlerts=0", result); } - // TODO: Add IT to test against sample alerts data - // https://github.com/opensearch-project/skills/issues/136 + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_singleAlert_noFilter() { + setupAlertingSystemIndices(); + ingestSampleAlert(monitorId, "1"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains("TotalAlerts=1")); + } + + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_singleAlert_filter_match() { + setupAlertingSystemIndices(); + ingestSampleAlert(monitorId, "1"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorId\": \"" + 
monitorId + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains("TotalAlerts=1")); + } + + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_singleAlert_filter_noMatch() { + setupAlertingSystemIndices(); + ingestSampleAlert(monitorId, "1"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "foo" + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains("TotalAlerts=0")); + } + + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_multipleAlerts_noFilter() { + setupAlertingSystemIndices(); + ingestSampleAlert(monitorId, "1"); + ingestSampleAlert(monitorId + "foo", "2"); + ingestSampleAlert(monitorId + "bar", "3"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains("TotalAlerts=3")); + } + + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_multipleAlerts_filter() { + setupAlertingSystemIndices(); + ingestSampleAlert(monitorId, "1"); + ingestSampleAlert(monitorId + "foo", "2"); + ingestSampleAlert(monitorId + "bar", "3"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains("TotalAlerts=1")); + } + + @SneakyThrows + public void testSearchAlertsToolInFlowAgent_multipleAlerts_complexParams() { + setupAlertingSystemIndices(); + String monitorId2 = monitorId + "2"; + String monitorId3 = monitorId + "3"; + ingestSampleAlert(monitorId, "1"); + ingestSampleAlert(monitorId2, "2"); + ingestSampleAlert(monitorId3, "3"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorIds\": " + + "[ \"" + + monitorId + + "\", \"" + + monitorId2 + + "\", \"" + + 
monitorId3 + + "\" ], " + + "\"sortOrder\": \"asc\", \"sortString\": \"monitor_name.keyword\", \"size\": 3, \"startIndex\": 0, \"severityLevel\": \"ALL\", \"alertState\": \"ALL\" } }"; + + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains("TotalAlerts=3")); + } + + @SneakyThrows + private void setupAlertingSystemIndices() { + createIndexWithConfiguration(ToolConstants.ALERTING_ALERTS_INDEX, alertsIndexMappings); + createIndexWithConfiguration(ToolConstants.ALERTING_CONFIG_INDEX, alertingConfigIndexMappings); + } + + private void ingestSampleAlert(String monitorId, String docId) { + JsonObject sampleAlertJson = new Gson().fromJson(sampleAlert, JsonObject.class); + sampleAlertJson.addProperty("monitor_id", monitorId); + addDocToIndex(ToolConstants.ALERTING_ALERTS_INDEX, docId, sampleAlertJson.toString()); + } + } diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java index aa406f8d..336d412a 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java @@ -11,31 +11,43 @@ import org.junit.After; import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.opensearch.agent.tools.utils.ToolConstants; +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; + import lombok.SneakyThrows; public class SearchAnomalyDetectorsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; - private static final String detectorId = "foo-id"; + private String detectorsIndexMappings; + private String sampleDetector; + private String sampleIndexMappings; private static final String detectorName = "foo-name"; + private static final String registerAgentFilepath = + 
"org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json"; + private static final String detectorsIndexMappingsFilepath = + "org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json"; + private static final String sampleDetectorFilepath = "org/opensearch/agent/tools/anomaly-detection/sample_detector.json"; + private static final String sampleIndexMappingsFilepath = "org/opensearch/agent/tools/anomaly-detection/sample_index_mappings.json"; @Before @SneakyThrows public void setUp() { super.setUp(); - registerAgentRequestBody = Files - .readString( - Path - .of( - this - .getClass() - .getClassLoader() - .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json") - .toURI() - ) - ); - createDetectorsSystemIndex(detectorId, detectorName); + registerAgentRequestBody = Files.readString(Path.of(this.getClass().getClassLoader().getResource(registerAgentFilepath).toURI())); + detectorsIndexMappings = Files + .readString(Path.of(this.getClass().getClassLoader().getResource(detectorsIndexMappingsFilepath).toURI())); + sampleDetector = Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleDetectorFilepath).toURI())); + sampleIndexMappings = Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleIndexMappingsFilepath).toURI())); + } + + @BeforeEach + @SneakyThrows + public void prepareTest() { + deleteSystemIndices(); } @After @@ -48,7 +60,6 @@ public void tearDown() { @SneakyThrows public void testSearchAnomalyDetectorsToolInFlowAgent_withNoSystemIndex() { - deleteSystemIndices(); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; String result = executeAgent(agentId, agentInput); @@ -57,6 +68,9 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_withNoSystemIndex() { @SneakyThrows public void 
testSearchAnomalyDetectorsToolInFlowAgent_noMatching() { + setupADSystemIndices(); + setupTestDetectionIndex("test-index"); + ingestSampleDetector(detectorName, "test-index"); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "foo" + "\"}}"; String result = executeAgent(agentId, agentInput); @@ -65,6 +79,9 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_noMatching() { @SneakyThrows public void testSearchAnomalyDetectorsToolInFlowAgent_matching() { + setupADSystemIndices(); + setupTestDetectionIndex("test-index"); + String detectorId = ingestSampleDetector(detectorName, "test-index"); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; String result = executeAgent(agentId, agentInput); @@ -74,20 +91,39 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_matching() { } @SneakyThrows - private void createDetectorsSystemIndex(String detectorId, String detectorName) { - createIndexWithConfiguration( - ToolConstants.AD_DETECTORS_INDEX, - "{\n" - + " \"mappings\": {\n" - + " \"properties\": {\n" - + " \"name\": {\n" - + " \"type\": \"text\",\n" - + " \"fields\": { \"keyword\": { \"type\": \"keyword\", \"ignore_above\": 256 }}" - + " }\n" - + " }\n" - + " }\n" - + "}" - ); - addDocToIndex(ToolConstants.AD_DETECTORS_INDEX, detectorId, List.of("name"), List.of(detectorName)); + public void testSearchAnomalyDetectorsToolInFlowAgent_complexParams() { + setupADSystemIndices(); + setupTestDetectionIndex("test-index"); + String detectorId = ingestSampleDetector(detectorName, "test-index"); + ingestSampleDetector(detectorName + "foo", "test-index"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorName\": \"" + + detectorName + + "\", \"highCardinality\": false, \"sortOrder\": \"asc\", \"sortString\": \"name.keyword\", \"size\": 10, 
\"startIndex\": 0 }}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains(String.format("id=%s", detectorId))); + assertTrue(result.contains(String.format("name=%s", detectorName))); + assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); + } + + @SneakyThrows + private void setupADSystemIndices() { + createIndexWithConfiguration(ToolConstants.AD_DETECTORS_INDEX, detectorsIndexMappings); + } + + @SneakyThrows + private void setupTestDetectionIndex(String indexName) { + createIndexWithConfiguration(indexName, sampleIndexMappings); + addDocToIndex(indexName, "foo-id", List.of("timestamp", "value"), List.of(1234, 1)); + } + + private String ingestSampleDetector(String detectorName, String detectionIndex) { + JsonObject sampleDetectorJson = new Gson().fromJson(sampleDetector, JsonObject.class); + JsonArray arr = new JsonArray(1); + arr.add(detectionIndex); + sampleDetectorJson.addProperty("name", detectorName); + sampleDetectorJson.remove("indices"); + sampleDetectorJson.add("indices", arr); + return indexDetector(sampleDetectorJson.toString()); } } diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java index 6454af5e..d9afb684 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java @@ -7,37 +7,45 @@ import java.nio.file.Files; import java.nio.file.Path; -import java.util.List; import java.util.Locale; import org.junit.After; import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; +import org.opensearch.agent.tools.utils.ToolConstants; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; import lombok.SneakyThrows; public class SearchAnomalyResultsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; + private String resultsIndexMappings; + private String sampleResult; 
private static final String detectorId = "foo-id"; private static final double anomalyGrade = 0.5; private static final double confidence = 0.6; private static final String resultsSystemIndexName = ".opendistro-anomaly-results-1"; + private static final String registerAgentFilepath = + "org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_results_tool_request_body.json"; + private static final String resultsIndexMappingsFilepath = "org/opensearch/agent/tools/anomaly-detection/results_index_mappings.json"; + private static final String sampleResultFilepath = "org/opensearch/agent/tools/anomaly-detection/sample_result.json"; @Before @SneakyThrows public void setUp() { super.setUp(); - registerAgentRequestBody = Files - .readString( - Path - .of( - this - .getClass() - .getClassLoader() - .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json") - .toURI() - ) - ); - createAnomalyResultsSystemIndex(detectorId, anomalyGrade, confidence); + registerAgentRequestBody = Files.readString(Path.of(this.getClass().getClassLoader().getResource(registerAgentFilepath).toURI())); + resultsIndexMappings = Files + .readString(Path.of(this.getClass().getClassLoader().getResource(resultsIndexMappingsFilepath).toURI())); + sampleResult = Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleResultFilepath).toURI())); + } + + @BeforeEach + @SneakyThrows + public void prepareTest() { + deleteSystemIndices(); } @After @@ -50,7 +58,6 @@ public void tearDown() { @SneakyThrows public void testSearchAnomalyResultsToolInFlowAgent_withNoSystemIndex() { - deleteSystemIndices(); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorId\": \"" + detectorId + "\"}}"; String result = executeAgent(agentId, agentInput); @@ -59,6 +66,8 @@ public void testSearchAnomalyResultsToolInFlowAgent_withNoSystemIndex() { @SneakyThrows public void 
testSearchAnomalyResultsToolInFlowAgent_noMatching() { + setupADSystemIndices(); + ingestSampleResult(detectorId, 0.5, 0.5, "1"); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorId\": \"" + detectorId + "foo" + "\"}}"; String result = executeAgent(agentId, agentInput); @@ -67,6 +76,8 @@ public void testSearchAnomalyResultsToolInFlowAgent_noMatching() { @SneakyThrows public void testSearchAnomalyResultsToolInFlowAgent_matching() { + setupADSystemIndices(); + ingestSampleResult(detectorId, anomalyGrade, confidence, "1"); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorId\": \"" + detectorId + "\"}}"; String result = executeAgent(agentId, agentInput); @@ -85,25 +96,41 @@ public void testSearchAnomalyResultsToolInFlowAgent_matching() { } @SneakyThrows - private void createAnomalyResultsSystemIndex(String detectorId, double anomalyGrade, double confidence) { - createIndexWithConfiguration( - resultsSystemIndexName, - "{\n" - + " \"mappings\": {\n" - + " \"properties\": {\n" - + " \"detector_id\": {\"type\": \"keyword\"}," - + " \"anomaly_grade\": {\"type\": \"double\"}," - + " \"confidence\": {\"type\": \"double\"}," - + " \"data_start_time\": {\"type\": \"date\", \"format\": \"strict_date_time||epoch_millis\"}" - + " }\n" - + " }\n" - + "}" - ); - addDocToIndex( - resultsSystemIndexName, - "foo-id", - List.of("detector_id", "anomaly_grade", "confidence"), - List.of(detectorId, anomalyGrade, confidence) + public void testSearchAnomalyResultsToolInFlowAgent_complexParams() { + setupADSystemIndices(); + ingestSampleResult(detectorId, anomalyGrade, confidence, "1"); + ingestSampleResult(detectorId + "foo", anomalyGrade, confidence, "2"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorId\": \"" + + detectorId + + "\"," + + "\"realTime\": true, \"anomalyGradeThreshold\": 0, \"sortOrder\": \"asc\"," + + 
"\"sortString\": \"data_start_time\", \"size\": 10, \"startIndex\": 0 }}"; + String result = executeAgent(agentId, agentInput); + assertEquals( + String + .format( + Locale.ROOT, + "AnomalyResults=[{detectorId=%s,grade=%2.1f,confidence=%2.1f}]TotalAnomalyResults=%d", + detectorId, + anomalyGrade, + confidence, + 1 + ), + result ); } + + @SneakyThrows + private void setupADSystemIndices() { + createIndexWithConfiguration(ToolConstants.AD_RESULTS_INDEX, resultsIndexMappings); + } + + private void ingestSampleResult(String detectorId, double anomalyGrade, double anomalyConfidence, String docId) { + JsonObject sampleResultJson = new Gson().fromJson(sampleResult, JsonObject.class); + sampleResultJson.addProperty("detector_id", detectorId); + sampleResultJson.addProperty("anomaly_grade", anomalyGrade); + sampleResultJson.addProperty("confidence", confidence); + addDocToIndex(ToolConstants.AD_RESULTS_INDEX, docId, sampleResultJson.toString()); + } } diff --git a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java index 2ed9c726..e14fbca8 100644 --- a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java @@ -9,39 +9,33 @@ import java.nio.file.Files; import java.nio.file.Path; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import org.json.JSONObject; import org.junit.After; import org.junit.Before; import org.junit.jupiter.api.BeforeEach; +import com.google.gson.Gson; +import com.google.gson.JsonObject; + import lombok.SneakyThrows; import lombok.extern.log4j.Log4j2; @Log4j2 public class SearchMonitorsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; + private String sampleMonitor; private static final String monitorName = "foo-name"; private static final String monitorName2 = "bar-name"; + private static final String registerAgentFilepath = + 
"org/opensearch/agent/tools/alerting/register_flow_agent_of_search_monitors_tool_request_body.json"; + private static final String sampleMonitorFilepath = "org/opensearch/agent/tools/alerting/sample_monitor.json"; @Before @SneakyThrows public void setUp() { super.setUp(); - registerAgentRequestBody = Files - .readString( - Path - .of( - this - .getClass() - .getClassLoader() - .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json") - .toURI() - ) - ); + registerAgentRequestBody = Files.readString(Path.of(this.getClass().getClassLoader().getResource(registerAgentFilepath).toURI())); + sampleMonitor = Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleMonitorFilepath).toURI())); } @BeforeEach @@ -68,7 +62,7 @@ public void testSearchMonitorsToolInFlowAgent_withNoSystemIndex() { @SneakyThrows public void testSearchMonitorsToolInFlowAgent_searchById() { - String monitorId = indexMonitor(getMonitorJsonString(monitorName, true)); + String monitorId = ingestSampleMonitor(monitorName, true); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "\"}}"; @@ -80,7 +74,7 @@ public void testSearchMonitorsToolInFlowAgent_searchById() { @SneakyThrows public void testSearchMonitorsToolInFlowAgent_singleMonitor_noFilter() { - indexMonitor(getMonitorJsonString(monitorName, true)); + ingestSampleMonitor(monitorName, true); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{}}"; @@ -99,8 +93,8 @@ public void testSearchMonitorsToolInFlowAgent_singleMonitor_filter() { @SneakyThrows public void testSearchMonitorsToolInFlowAgent_multipleMonitors_noFilter() { - indexMonitor(getMonitorJsonString(monitorName, true)); - indexMonitor(getMonitorJsonString(monitorName2, false)); + ingestSampleMonitor(monitorName, true); + ingestSampleMonitor(monitorName2, false); String agentId = 
createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{}}"; @@ -114,8 +108,8 @@ public void testSearchMonitorsToolInFlowAgent_multipleMonitors_noFilter() { @SneakyThrows public void testSearchMonitorsToolInFlowAgent_multipleMonitors_filter() { - indexMonitor(getMonitorJsonString(monitorName, true)); - indexMonitor(getMonitorJsonString(monitorName2, false)); + ingestSampleMonitor(monitorName, true); + ingestSampleMonitor(monitorName2, false); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorName\": \"" + monitorName + "\"}}"; @@ -126,20 +120,23 @@ public void testSearchMonitorsToolInFlowAgent_multipleMonitors_filter() { assertTrue(result.contains("TotalMonitors=1")); } - // Helper fn to create the JSON string to use in a REST request body when indexing a monitor - private String getMonitorJsonString(String monitorName, boolean enabled) { - JSONObject jsonObj = new JSONObject(); - jsonObj.put("type", "monitor"); - jsonObj.put("name", monitorName); - jsonObj.put("enabled", String.valueOf(enabled)); - jsonObj.put("inputs", Collections.emptyList()); - jsonObj.put("triggers", Collections.emptyList()); - Map scheduleMap = new HashMap(); - Map periodMap = new HashMap(); - periodMap.put("interval", 5); - periodMap.put("unit", "MINUTES"); - scheduleMap.put("period", periodMap); - jsonObj.put("schedule", scheduleMap); - return jsonObj.toString(); + @SneakyThrows + public void testSearchMonitorsToolInFlowAgent_multipleMonitors_complexParams() { + ingestSampleMonitor(monitorName, true); + ingestSampleMonitor(monitorName2, false); + + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"monitorName\": \"" + + monitorName + + "\", \"enabled\": true, \"hasTriggers\": false, \"sortOrder\": \"asc\", \"sortString\": \"monitor.name.keyword\", \"size\": 10, \"startIndex\": 0 }}"; + String result = executeAgent(agentId, agentInput); + 
assertTrue(result.contains("TotalMonitors=1")); + } + + private String ingestSampleMonitor(String monitorName, boolean enabled) { + JsonObject sampleMonitorJson = new Gson().fromJson(sampleMonitor, JsonObject.class); + sampleMonitorJson.addProperty("name", monitorName); + sampleMonitorJson.addProperty("enabled", String.valueOf(enabled)); + return indexMonitor(sampleMonitorJson.toString()); } } diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json b/src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json new file mode 100644 index 00000000..9d8c5ce8 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json @@ -0,0 +1,173 @@ +{ + "mappings": { + "dynamic": "strict", + "_meta": { + "schema_version": 5 + }, + "properties": { + "schema_version": { + "type": "integer" + }, + "monitor_id": { + "type": "keyword" + }, + "monitor_version": { + "type": "long" + }, + "id": { + "type": "keyword" + }, + "version": { + "type": "long" + }, + "severity": { + "type": "keyword" + }, + "monitor_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "monitor_user": { + "properties": { + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "backend_roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "custom_attribute_names": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + } + } + }, + "execution_id": { + "type": "keyword" + }, + "workflow_id": { + "type": "keyword" + }, + "workflow_name": { + "type": "keyword" + }, + "trigger_id": { + "type": "keyword" + }, + "trigger_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "finding_ids": { + 
"type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "associated_alert_ids": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "related_doc_ids": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "state": { + "type": "keyword" + }, + "start_time": { + "type": "date" + }, + "last_notification_time": { + "type": "date" + }, + "acknowledged_time": { + "type": "date" + }, + "end_time": { + "type": "date" + }, + "error_message": { + "type": "text" + }, + "alert_history": { + "type": "nested", + "properties": { + "timestamp": { + "type": "date" + }, + "message": { + "type": "text" + } + } + }, + "action_execution_results": { + "type": "nested", + "properties": { + "action_id": { + "type": "keyword" + }, + "last_execution_time": { + "type": "date" + }, + "throttled_count": { + "type": "integer" + } + } + }, + "agg_alert_content": { + "dynamic": true, + "properties": { + "parent_bucket_path": { + "type": "text" + }, + "bucket_key": { + "type": "text" + } + } + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/alerting_config_index_mappings.json b/src/test/resources/org/opensearch/agent/tools/alerting/alerting_config_index_mappings.json new file mode 100644 index 00000000..759cd448 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/alerting/alerting_config_index_mappings.json @@ -0,0 +1,1269 @@ +{ + "mappings": { + "_meta": { + "schema_version": 8 + }, + "properties": { + "audit_delegate_monitor_alerts": { + "type": "boolean" + }, + "data_sources": { + "properties": { + "alerts_history_index": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "alerts_history_index_pattern": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "alerts_index": { + "type": "text", + "fields": { + "keyword": { + "type": 
"keyword", + "ignore_above": 256 + } + } + }, + "findings_enabled": { + "type": "boolean" + }, + "findings_index": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "findings_index_pattern": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query_index": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query_index_mappings_by_type": { + "type": "object" + } + } + }, + "destination": { + "dynamic": "false", + "properties": { + "chime": { + "properties": { + "url": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + }, + "custom_webhook": { + "properties": { + "header_params": { + "type": "object", + "enabled": false + }, + "host": { + "type": "text" + }, + "password": { + "type": "text" + }, + "path": { + "type": "keyword" + }, + "port": { + "type": "integer" + }, + "query_params": { + "type": "object", + "enabled": false + }, + "scheme": { + "type": "keyword" + }, + "url": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "username": { + "type": "text" + } + } + }, + "email": { + "properties": { + "email_account_id": { + "type": "keyword" + }, + "recipients": { + "type": "nested", + "properties": { + "email": { + "type": "text" + }, + "email_group_id": { + "type": "keyword" + }, + "type": { + "type": "keyword" + } + } + } + } + }, + "last_update_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "schema_version": { + "type": "integer" + }, + "slack": { + "properties": { + "url": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + }, + "type": { + "type": "keyword" + }, + "user": { + 
"properties": { + "backend_roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "custom_attribute_names": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + } + } + } + } + }, + "email_account": { + "properties": { + "from": { + "type": "text" + }, + "host": { + "type": "text" + }, + "method": { + "type": "text" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "port": { + "type": "integer" + } + } + }, + "email_group": { + "properties": { + "emails": { + "type": "nested", + "properties": { + "email": { + "type": "text" + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + }, + "enabled": { + "type": "boolean" + }, + "inputs": { + "properties": { + "composite_input": { + "properties": { + "sequence": { + "properties": { + "delegates": { + "properties": { + "monitor_id": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "order": { + "type": "long" + } + } + } + } + } + } + }, + "doc_level_input": { + "properties": { + "description": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "indices": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "queries": { + "properties": { + "id": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + 
"ignore_above": 256 + } + } + } + } + } + } + }, + "search": { + "properties": { + "indices": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query": { + "properties": { + "aggregations": { + "properties": { + "metric": { + "properties": { + "avg": { + "properties": { + "field": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + } + } + }, + "query": { + "properties": { + "bool": { + "properties": { + "adjust_pure_negative": { + "type": "boolean" + }, + "boost": { + "type": "long" + }, + "filter": { + "properties": { + "range": { + "properties": { + "dayOfWeek": { + "properties": { + "boost": { + "type": "long" + }, + "from": { + "type": "long" + }, + "include_lower": { + "type": "boolean" + }, + "include_upper": { + "type": "boolean" + }, + "to": { + "type": "long" + } + } + }, + "timestamp": { + "properties": { + "boost": { + "type": "long" + }, + "format": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "from": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "include_lower": { + "type": "boolean" + }, + "include_upper": { + "type": "boolean" + }, + "to": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "term": { + "properties": { + "dayOfWeek": { + "properties": { + "boost": { + "type": "long" + }, + "value": { + "type": "long" + } + } + } + } + } + } + } + } + } + } + }, + "size": { + "type": "long" + } + } + } + } + }, + "uri": { + "properties": { + "api_type": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "path": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "path_params": { + "type": "text", + "fields": { + "keyword": { + 
"type": "keyword", + "ignore_above": 256 + } + } + }, + "url": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "last_update_time": { + "type": "long" + }, + "metadata": { + "properties": { + "last_action_execution_times": { + "type": "nested", + "properties": { + "action_id": { + "type": "keyword" + }, + "execution_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + } + } + }, + "last_run_context": { + "type": "object", + "enabled": false + }, + "monitor_id": { + "type": "keyword" + }, + "source_to_query_index_mapping": { + "type": "object", + "enabled": false + } + } + }, + "monitor": { + "dynamic": "false", + "properties": { + "data_sources": { + "properties": { + "alerts_index": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "findings_index": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query_index": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query_index_mapping": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + }, + "enabled": { + "type": "boolean" + }, + "enabled_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "group_by_fields": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "inputs": { + "type": "nested", + "properties": { + "search": { + "properties": { + "indices": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query": { + "type": "object", + "enabled": false + } + } + } + } + }, + "last_update_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "monitor_type": { + "type": "keyword" + }, + "name": { + "type": "text", + "fields": { + 
"keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "owner": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "schedule": { + "properties": { + "cron": { + "properties": { + "expression": { + "type": "text" + }, + "timezone": { + "type": "keyword" + } + } + }, + "period": { + "properties": { + "interval": { + "type": "integer" + }, + "unit": { + "type": "keyword" + } + } + } + } + }, + "schema_version": { + "type": "integer" + }, + "triggers": { + "type": "nested", + "properties": { + "actions": { + "type": "nested", + "properties": { + "destination_id": { + "type": "keyword" + }, + "message_template": { + "type": "object", + "enabled": false + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "subject_template": { + "type": "object", + "enabled": false + }, + "throttle": { + "properties": { + "unit": { + "type": "keyword" + }, + "value": { + "type": "integer" + } + } + }, + "throttle_enabled": { + "type": "boolean" + } + } + }, + "condition": { + "type": "object", + "enabled": false + }, + "min_time_between_executions": { + "type": "integer" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "query_level_trigger": { + "properties": { + "actions": { + "type": "nested", + "properties": { + "destination_id": { + "type": "keyword" + }, + "message_template": { + "type": "object", + "enabled": false + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "subject_template": { + "type": "object", + "enabled": false + }, + "throttle": { + "properties": { + "unit": { + "type": "keyword" + }, + "value": { + "type": "integer" + } + } + }, + "throttle_enabled": { + "type": "boolean" + } + } + }, + "condition": { + "type": "object", + "enabled": false + }, + "min_time_between_executions": { + 
"type": "integer" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "type": { + "type": "keyword" + }, + "ui_metadata": { + "type": "object", + "enabled": false + }, + "user": { + "properties": { + "backend_roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "custom_attribute_names": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + } + } + } + } + }, + "monitor_type": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "owner": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "schedule": { + "properties": { + "period": { + "properties": { + "interval": { + "type": "long" + }, + "unit": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "schema_version": { + "type": "long" + }, + "triggers": { + "properties": { + "chained_alert_trigger": { + "properties": { + "condition": { + "properties": { + "script": { + "properties": { + "lang": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "source": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "id": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "severity": { + 
"type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + }, + "document_level_trigger": { + "properties": { + "condition": { + "properties": { + "script": { + "properties": { + "lang": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "source": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "id": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "severity": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + }, + "query_level_trigger": { + "properties": { + "condition": { + "properties": { + "script": { + "properties": { + "lang": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "source": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "id": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "severity": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "type": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "workflow": { + "dynamic": "false", + "properties": { + "audit_delegate_monitor_alerts": { + "type": "boolean" + }, + "enabled": { + "type": "boolean" + }, + "enabled_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "group_by_fields": { + "type": "text", + "fields": { + "keyword": { + "type": 
"keyword", + "ignore_above": 256 + } + } + }, + "inputs": { + "type": "nested", + "properties": { + "composite_input": { + "type": "nested", + "properties": { + "sequence": { + "properties": { + "delegates": { + "type": "nested", + "properties": { + "chained_monitor_findings": { + "properties": { + "monitor_id": { + "type": "keyword" + } + } + }, + "monitor_id": { + "type": "keyword" + }, + "order": { + "type": "integer" + } + } + } + } + } + } + } + } + }, + "last_update_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "owner": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "schedule": { + "properties": { + "cron": { + "properties": { + "expression": { + "type": "text" + }, + "timezone": { + "type": "keyword" + } + } + }, + "period": { + "properties": { + "interval": { + "type": "integer" + }, + "unit": { + "type": "keyword" + } + } + } + } + }, + "schema_version": { + "type": "integer" + }, + "type": { + "type": "keyword" + }, + "user": { + "properties": { + "backend_roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "custom_attribute_names": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + } + } + }, + "workflow_type": { + "type": "keyword" + } + } + }, + "workflow_metadata": { + "properties": { + "latest_execution_id": { + "type": "keyword" + }, + "latest_run_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "monitor_ids": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 1000 + } + } + }, + "workflow_id": { + "type": 
"keyword" + } + } + }, + "workflow_type": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_alerts_tool_request_body.json similarity index 100% rename from src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_alerts_tool_request_body.json rename to src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_alerts_tool_request_body.json diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_monitors_tool_request_body.json similarity index 100% rename from src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_monitors_tool_request_body.json rename to src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_monitors_tool_request_body.json diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json b/src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json new file mode 100644 index 00000000..65574189 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json @@ -0,0 +1,23 @@ +{ + "monitor_id": "foo-monitor-id", + "workflow_id": "", + "workflow_name": "", + "associated_alert_ids": [], + "schema_version": 5, + "monitor_version": 1, + "monitor_name": "foo-monitor", + "execution_id": "foo-execution-id", + "trigger_id": "foo-trigger-id", + "trigger_name": "foo-trigger-name", + "finding_ids": [], + "related_doc_ids": [], + "state": "COMPLETED", + "error_message": null, + "alert_history": [], + "severity": "2", + "action_execution_results": [], + "start_time": 
1234, + "last_notification_time": 1234, + "end_time": 1234, + "acknowledged_time": null +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/sample_monitor.json b/src/test/resources/org/opensearch/agent/tools/alerting/sample_monitor.json new file mode 100644 index 00000000..d7e071a4 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/alerting/sample_monitor.json @@ -0,0 +1,15 @@ +{ + "type": "monitor", + "schema_version": 0, + "name": "foo-monitor", + "monitor_type": "query_level_monitor", + "enabled": true, + "schedule": { + "period": { + "interval": 1, + "unit": "MINUTES" + } + }, + "inputs": [], + "triggers": [] +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json new file mode 100644 index 00000000..561f30ce --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json @@ -0,0 +1,157 @@ +{ + "mappings": { + "dynamic": "false", + "_meta": { + "schema_version": 5 + }, + "properties": { + "category_field": { + "type": "keyword" + }, + "description": { + "type": "text" + }, + "detection_interval": { + "properties": { + "period": { + "properties": { + "interval": { + "type": "integer" + }, + "unit": { + "type": "keyword" + } + } + } + } + }, + "detector_type": { + "type": "keyword" + }, + "feature_attributes": { + "type": "nested", + "properties": { + "aggregation_query": { + "type": "object", + "enabled": false + }, + "feature_enabled": { + "type": "boolean" + }, + "feature_id": { + "type": "keyword", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "feature_name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + }, + "filter_query": { + "type": "object", + "enabled": false + }, + 
"indices": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "last_update_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "result_index": { + "type": "keyword" + }, + "schema_version": { + "type": "integer" + }, + "shingle_size": { + "type": "integer" + }, + "time_field": { + "type": "keyword" + }, + "ui_metadata": { + "type": "object", + "enabled": false + }, + "user": { + "type": "nested", + "properties": { + "backend_roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "custom_attribute_names": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + } + } + }, + "window_delay": { + "properties": { + "period": { + "properties": { + "interval": { + "type": "integer" + }, + "unit": { + "type": "keyword" + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json similarity index 100% rename from src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_detectors_tool_request_body.json rename to src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json 
b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_results_tool_request_body.json similarity index 100% rename from src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_anomaly_results_tool_request_body.json rename to src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_results_tool_request_body.json diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/results_index_mappings.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/results_index_mappings.json new file mode 100644 index 00000000..ee4e5e26 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/results_index_mappings.json @@ -0,0 +1,161 @@ +{ + "mappings": { + "dynamic": "false", + "_meta": { + "schema_version": 5 + }, + "properties": { + "anomaly_grade": { + "type": "double" + }, + "anomaly_score": { + "type": "double" + }, + "approx_anomaly_start_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "confidence": { + "type": "double" + }, + "data_end_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "data_start_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "detector_id": { + "type": "keyword" + }, + "entity": { + "type": "nested", + "properties": { + "name": { + "type": "keyword" + }, + "value": { + "type": "keyword" + } + } + }, + "error": { + "type": "text" + }, + "execution_end_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "execution_start_time": { + "type": "date", + "format": "strict_date_time||epoch_millis" + }, + "expected_values": { + "type": "nested", + "properties": { + "likelihood": { + "type": "double" + }, + "value_list": { + "type": "nested", + "properties": { + "data": { + "type": "double" + }, + "feature_id": { + "type": "keyword" + } + } + } + } + }, + "feature_data": { + 
"type": "nested", + "properties": { + "data": { + "type": "double" + }, + "feature_id": { + "type": "keyword" + } + } + }, + "is_anomaly": { + "type": "boolean" + }, + "model_id": { + "type": "keyword" + }, + "past_values": { + "type": "nested", + "properties": { + "data": { + "type": "double" + }, + "feature_id": { + "type": "keyword" + } + } + }, + "relevant_attribution": { + "type": "nested", + "properties": { + "data": { + "type": "double" + }, + "feature_id": { + "type": "keyword" + } + } + }, + "schema_version": { + "type": "integer" + }, + "task_id": { + "type": "keyword" + }, + "threshold": { + "type": "double" + }, + "user": { + "type": "nested", + "properties": { + "backend_roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "custom_attribute_names": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + }, + "name": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "roles": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_detector.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_detector.json new file mode 100644 index 00000000..b23a3e99 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_detector.json @@ -0,0 +1,48 @@ +{ + "name": "test-detector", + "description": "Test detector", + "time_field": "timestamp", + "indices": [ + "test-index" + ], + "feature_attributes": [ + { + "feature_name": "test", + "feature_enabled": true, + "aggregation_query": { + "test": { + "sum": { + "field": "value" + } + } + } + } + ], + "filter_query": { + "bool": { + "filter": [ + { + "range": { + "value": { + "gt": 1 + } + } + } + ], + "adjust_pure_negative": true, + "boost": 1 + } + }, + "detection_interval": { + "period": { + 
"interval": 1, + "unit": "Minutes" + } + }, + "window_delay": { + "period": { + "interval": 1, + "unit": "Minutes" + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_index_mappings.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_index_mappings.json new file mode 100644 index 00000000..0697e7bf --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_index_mappings.json @@ -0,0 +1,12 @@ +{ + "mappings": { + "properties": { + "value": { + "type": "integer" + }, + "timestamp": { + "type": "date" + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_result.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_result.json new file mode 100644 index 00000000..d81a4c32 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/sample_result.json @@ -0,0 +1,19 @@ +{ + "detector_id": "foo-id", + "schema_version": 5, + "data_start_time": 1234, + "data_end_time": 1234, + "feature_data": [ + { + "feature_id": "foo-feature-id", + "feature_name": "foo-feature-name", + "data": 1 + } + ], + "execution_start_time": 1234, + "execution_end_time": 1234, + "anomaly_score": 0.5, + "anomaly_grade": 0.5, + "confidence": 0.5, + "threshold": 0.8 +} \ No newline at end of file From e3d6711a026a423fa8bd238ec83f475b14b892b6 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 12:17:38 +0800 Subject: [PATCH 048/119] fix UT bug (#172) (#175) (cherry picked from commit 6070c6529693f737e9da296f7a892dc31aa02c31) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 3 ++- src/test/java/org/opensearch/agent/tools/PPLToolTests.java | 5 +++-- 2 files changed, 
5 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 09fe1d3a..5aaf871f 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -173,7 +173,8 @@ public void run(Map parameters, ActionListener listener) Map dataAsMap = (Map) modelTensor.getDataAsMap(); String ppl = parseOutput(dataAsMap.get("response"), indexName); if (!this.execute) { - listener.onResponse((T) ppl); + Map ret = ImmutableMap.of("ppl", ppl); + listener.onResponse((T) AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(ret))); return; } JSONObject jsonContent = new JSONObject(ImmutableMap.of("query", ppl)); diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index 129c2411..8ed605a8 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -142,11 +142,12 @@ public void testTool() { public void testTool_with_WithoutExecution() { PPLTool tool = PPLTool.Factory .getInstance() - .create(ImmutableMap.of("model_id", "modelId", "model_type", "claude", "execute", false)); + .create(ImmutableMap.of("model_id", "modelId", "model_type", "claude", "execute", "false")); assertEquals(PPLTool.TYPE, tool.getName()); tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { - assertEquals("source=demo| head 1", executePPLResult); + Map ret = gson.fromJson(executePPLResult, Map.class); + assertEquals("source=demo| head 1", ret.get("ppl")); }, e -> { log.info(e); })); } From 41606a84b0ee20ba444eb91d03c3ff3f66817f62 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Sun, 4 Feb 2024 10:38:59 +0800 Subject: [PATCH 049/119] Add agent 
framework configuration to make IT pass (#179) (#180) * Add agent framework configuration to make IT pass * Fix failure UT since the response data structure change --------- (cherry picked from commit 940ac32f13b1c5141ee4a05f458ad4e79d9efc02) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../integTest/BaseAgentToolsIT.java | 1 + .../integTest/VisualizationsToolIT.java | 27 ++++++++++++------- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index 2fdbe6ab..cc7b2702 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -61,6 +61,7 @@ public void updateClusterSettings() { updateClusterSettings("plugins.ml_commons.native_memory_threshold", 100); updateClusterSettings("plugins.ml_commons.jvm_heap_memory_threshold", 100); updateClusterSettings("plugins.ml_commons.allow_registering_model_via_url", true); + updateClusterSettings("plugins.ml_commons.agent_framework_enabled", true); } @SneakyThrows diff --git a/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java b/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java index e7f54521..2bf0e611 100644 --- a/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java +++ b/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java @@ -16,6 +16,8 @@ import org.opensearch.client.Response; import org.opensearch.core.rest.RestStatus; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; import com.google.gson.JsonParser; import lombok.extern.log4j.Log4j2; @@ -86,7 +88,7 @@ private void prepareVisualization(String title, String id) { } private String extractAdditionalInfo(String responseStr) { - return JsonParser + JsonArray output = JsonParser .parseString(responseStr) .getAsJsonObject() 
.get("inference_results") @@ -94,14 +96,19 @@ private String extractAdditionalInfo(String responseStr) { .get(0) .getAsJsonObject() .get("output") - .getAsJsonArray() - .get(0) - .getAsJsonObject() - .get("dataAsMap") - .getAsJsonObject() - .get("additional_info") - .getAsJsonObject() - .get(String.format(Locale.ROOT, "%s.output", toolType())) - .getAsString(); + .getAsJsonArray(); + for (JsonElement element : output) { + if ("response".equals(element.getAsJsonObject().get("name").getAsString())) { + return element + .getAsJsonObject() + .get("dataAsMap") + .getAsJsonObject() + .get("additional_info") + .getAsJsonObject() + .get(String.format(Locale.ROOT, "%s.output", toolType())) + .getAsString(); + } + } + return null; } } From 005b4af39934a33732db9af9c780c24936652462 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Sun, 4 Feb 2024 16:24:05 +0800 Subject: [PATCH 050/119] fixPPLAllowedFields (#181) (#182) * fixPPLAllowedFields * rename variables --------- (cherry picked from commit 9174e4ca98005386d8ad111169d9e178e1a40760) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/PPLTool.java | 49 ++++++++++++++----- 1 file changed, 38 insertions(+), 11 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 5aaf871f..46e2496a 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -5,8 +5,6 @@ package org.opensearch.agent.tools; -import static org.opensearch.ml.common.CommonValue.*; - import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -16,9 +14,11 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; 
import java.util.Map; +import java.util.Set; import java.util.StringJoiner; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -93,14 +93,35 @@ public class PPLTool implements Tool { private static Gson gson = new Gson(); - private static Map defaultPromptDict; + private static Map DEFAULT_PROMPT_DICT; + + private static Set ALLOWED_FIELDS_TYPE; static { + ALLOWED_FIELDS_TYPE = new HashSet<>(); // from + // https://github.com/opensearch-project/sql/blob/2.x/docs/user/ppl/general/datatypes.rst#data-types-mapping + ALLOWED_FIELDS_TYPE.add("boolean"); + ALLOWED_FIELDS_TYPE.add("byte"); + ALLOWED_FIELDS_TYPE.add("short"); + ALLOWED_FIELDS_TYPE.add("integer"); + ALLOWED_FIELDS_TYPE.add("long"); + ALLOWED_FIELDS_TYPE.add("float"); + ALLOWED_FIELDS_TYPE.add("half_float"); + ALLOWED_FIELDS_TYPE.add("scaled_float"); + ALLOWED_FIELDS_TYPE.add("double"); + ALLOWED_FIELDS_TYPE.add("keyword"); + ALLOWED_FIELDS_TYPE.add("text"); + ALLOWED_FIELDS_TYPE.add("date"); + ALLOWED_FIELDS_TYPE.add("ip"); + ALLOWED_FIELDS_TYPE.add("binary"); + ALLOWED_FIELDS_TYPE.add("object"); + ALLOWED_FIELDS_TYPE.add("nested"); + try { - defaultPromptDict = loadDefaultPromptDict(); + DEFAULT_PROMPT_DICT = loadDefaultPromptDict(); } catch (IOException e) { log.error("fail to load default prompt dict" + e.getMessage()); - defaultPromptDict = new HashMap<>(); + DEFAULT_PROMPT_DICT = new HashMap<>(); } } @@ -127,7 +148,7 @@ public PPLTool(Client client, String modelId, String contextPrompt, String pplMo this.modelId = modelId; this.pplModelType = PPLModelType.from(pplModelType); if (contextPrompt.isEmpty()) { - this.contextPrompt = this.defaultPromptDict.getOrDefault(this.pplModelType.toString(), ""); + this.contextPrompt = this.DEFAULT_PROMPT_DICT.getOrDefault(this.pplModelType.toString(), ""); } else { this.contextPrompt = contextPrompt; } @@ -147,13 +168,15 @@ public void run(Map parameters, ActionListener listener) + indexName ); } - SearchRequest searchRequest = 
buildSearchRequest(indexName); + GetMappingsRequest getMappingsRequest = buildGetMappingRequest(indexName); client.admin().indices().getMappings(getMappingsRequest, ActionListener.wrap(getMappingsResponse -> { Map mappings = getMappingsResponse.getMappings(); if (mappings.size() == 0) { throw new IllegalArgumentException("No matching mapping with index name: " + indexName); } + String firstIndexName = (String) mappings.keySet().toArray()[0]; + SearchRequest searchRequest = buildSearchRequest(firstIndexName); client.search(searchRequest, ActionListener.wrap(searchResponse -> { SearchHit[] searchHits = searchResponse.getHits().getHits(); String tableInfo = constructTableInfo(searchHits, mappings); @@ -318,13 +341,17 @@ private String constructTableInfo(SearchHit[] searchHits, Map Date: Mon, 5 Feb 2024 16:12:49 +0800 Subject: [PATCH 051/119] backport from #131 (#183) * backport from #131 Signed-off-by: xinyual * backport from #131 Signed-off-by: xinyual * backport from #131 Signed-off-by: xinyual --------- Signed-off-by: xinyual --- .../org/opensearch/agent/tools/PPLTool.java | 52 +++++++++++++++++-- .../agent/tools/PPLDefaultPrompt.json | 2 +- .../opensearch/agent/tools/PPLToolTests.java | 48 +++++++++++++++++ .../org/opensearch/integTest/PPLToolIT.java | 20 ++++++- 4 files changed, 114 insertions(+), 8 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 46e2496a..46b900a8 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -72,7 +72,8 @@ public class PPLTool implements Tool { @Setter private Client client; - private static final String DEFAULT_DESCRIPTION = "Use this tool to generate PPL and execute."; + private static final String DEFAULT_DESCRIPTION = + "\"Use this tool when user ask question based on the data in the cluster or parse user statement about which index to use in a conversion.\nAlso use 
this tool when question only contains index information.\n1. If uesr question contain both question and index name, the input parameters are {'question': UserQuestion, 'index': IndexName}.\n2. If user question contain only question, the input parameter is {'question': UserQuestion}.\n3. If uesr question contain only index name, find the original human input from the conversation histroy and formulate parameter as {'question': UserQuestion, 'index': IndexName}\nThe index name should be exactly as stated in user's input."; @Setter @Getter @@ -91,6 +92,8 @@ public class PPLTool implements Tool { private PPLModelType pplModelType; + private String previousToolKey; + private static Gson gson = new Gson(); private static Map DEFAULT_PROMPT_DICT; @@ -143,7 +146,7 @@ public static PPLModelType from(String value) { } - public PPLTool(Client client, String modelId, String contextPrompt, String pplModelType, boolean execute) { + public PPLTool(Client client, String modelId, String contextPrompt, String pplModelType, String previousToolKey, boolean execute) { this.client = client; this.modelId = modelId; this.pplModelType = PPLModelType.from(pplModelType); @@ -152,12 +155,19 @@ public PPLTool(Client client, String modelId, String contextPrompt, String pplMo } else { this.contextPrompt = contextPrompt; } + this.previousToolKey = previousToolKey; this.execute = execute; } @Override public void run(Map parameters, ActionListener listener) { - String indexName = parameters.get("index"); + parameters = extractFromChatParameters(parameters); + String indexName = getIndexNameFromParameters(parameters); + if (StringUtils.isBlank(indexName)) { + throw new IllegalArgumentException( + "Return this final answer to human directly and do not use other tools: 'Please provide index name'. 
Please try to directly send this message to human to ask for index name" + ); + } String question = parameters.get("question"); if (StringUtils.isBlank(indexName) || StringUtils.isBlank(question)) { throw new IllegalArgumentException("Parameter index and question can not be null or empty."); @@ -180,7 +190,7 @@ public void run(Map parameters, ActionListener listener) client.search(searchRequest, ActionListener.wrap(searchResponse -> { SearchHit[] searchHits = searchResponse.getHits().getHits(); String tableInfo = constructTableInfo(searchHits, mappings); - String prompt = constructPrompt(tableInfo, question, indexName); + String prompt = constructPrompt(tableInfo, question.strip(), indexName); RemoteInferenceInputDataSet inputDataSet = RemoteInferenceInputDataSet .builder() .parameters(Collections.singletonMap("prompt", prompt)) @@ -234,7 +244,17 @@ public void run(Map parameters, ActionListener listener) )); }, e -> { log.info("fail to get mapping: " + e); - listener.onFailure(e); + String errorMessage = e.getMessage(); + if (errorMessage.contains("no such index")) { + listener + .onFailure( + new IllegalArgumentException( + "Return this final answer to human directly and do not use other tools: 'Please provide index name'. 
Please try to directly send this message to human to ask for index name" + ) + ); + } else { + listener.onFailure(e); + } })); } @@ -285,6 +305,7 @@ public PPLTool create(Map map) { (String) map.get("model_id"), (String) map.getOrDefault("prompt", ""), (String) map.getOrDefault("model_type", ""), + (String) map.getOrDefault("previous_tool_name", ""), Boolean.valueOf((String) map.getOrDefault("execute", "true")) ); } @@ -434,6 +455,7 @@ private String parseOutput(String llmOutput, String indexName) { ppl = matcher.group(1).replaceAll("[\\r\\n]", "").replaceAll("ISNOTNULL", "isnotnull").trim(); } else { // logic for only ppl returned int sourceIndex = llmOutput.indexOf("source="); + int describeIndex = llmOutput.indexOf("describe "); if (sourceIndex != -1) { llmOutput = llmOutput.substring(sourceIndex); @@ -445,6 +467,17 @@ private String parseOutput(String llmOutput, String indexName) { lists[0] = "source=" + indexName; } + // Joining the string back together + ppl = String.join("|", lists); + } else if (describeIndex != -1) { + llmOutput = llmOutput.substring(describeIndex); + String[] lists = llmOutput.split("\\|"); + + // Modifying the first element + if (lists.length > 0) { + lists[0] = "describe " + indexName; + } + // Joining the string back together ppl = String.join("|", lists); } else { @@ -456,6 +489,15 @@ private String parseOutput(String llmOutput, String indexName) { return ppl; } + private String getIndexNameFromParameters(Map parameters) { + String indexName = parameters.getOrDefault("index", ""); + if (!StringUtils.isBlank(this.previousToolKey) && StringUtils.isBlank(indexName)) { + indexName = parameters.getOrDefault(this.previousToolKey + ".output", ""); // read index name from previous key + } + return indexName; + + } + private static Map loadDefaultPromptDict() throws IOException { InputStream searchResponseIns = PPLTool.class.getResourceAsStream("PPLDefaultPrompt.json"); if (searchResponseIns != null) { diff --git 
a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json index 9fda5865..79d19a22 100644 --- a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json +++ b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json @@ -1,4 +1,4 @@ { "CLAUDE": "\n\nHuman:You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=\\`\\` | where \\`\\` = '\\`\\`'\n\nHere are some sample questions and the PPL query to retrieve the information. The format for fields is\n\\`\\`\\`\n- field_name: field_type (sample field value)\n\\`\\`\\`\n\nFor example, below is a field called \\`timestamp\\`, it has a field type of \\`date\\`, and a sample value of it could look like \\`1686000665919\\`.\n\\`\\`\\`\n- timestamp: date (1686000665919)\n\\`\\`\\`\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=\\`accounts\\` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort +age | head 5 | fields \\`firstname\\`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=\\`accounts\\` | fields \\`address\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=\\`accounts\\` | where 
\\`firstname\\` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie' OR \\`lastname\\` = 'frank' | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=\\`accounts\\` | where \\`firstname\\` != 'Hattie' AND \\`lastname\\` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=\\`accounts\\` | where QUERY_STRING(['email'], '.com') | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where there is an email\nPPL: source=\\`accounts\\` | where ISNOTNULL(\\`email\\`)\n\nQuestion: Count the number of documents in index 'accounts'\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=\\`accounts\\` | where \\`firstname\\` ='Amber' | stats COUNT() AS \\`count\\`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=\\`accounts\\` | where \\`age\\` > 33 | stats COUNT() AS \\`count\\`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=\\`accounts\\` | stats DISTINCT_COUNT(age) AS \\`distinct_count\\`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\` BY \\`gender\\`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=\\`accounts\\` | stats AVG(\\`age\\`) AS \\`avg_age\\`, MIN(\\`age\\`) AS \\`min_age\\`, MAX(\\`age\\`) AS \\`max_age\\`\n\nQuestion: Show all states sorted by average balance. 
index is 'accounts'\nPPL: source=\\`accounts\\` | stats AVG(\\`balance\\`) AS \\`avg_balance\\` BY \\`state\\` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- 
total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'clothing') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\` by SPAN(\\`order_date\\`, 2h) AS \\`span\\`, \\`geoip.city_name\\`\n\nQuestion: What is the total revenue of shoes each day in this week? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'shoes') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(\\`taxful_total_price\\`) AS \\`revenue\\` by SPAN(\\`order_date\\`, 1d) AS \\`span\\`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text 
('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=\\`events\\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND \\`observerTime\\` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 Feburary? index is 'events'\nPPL: source=\\`events\\` | where QUERY_STRING(['http.response.status_code'], '!200') AND \\`observerTime\\` >= '2023-03-01 00:00:00' AND \\`observerTime\\` < '2023-04-01 00:00:00' | stats COUNT() AS \\`count\\`\n\nQuestion: Count the number of business days that have web category logs last week? index is 'events'\nPPL: source=\\`events\\` | where \\`category\\` = 'web' AND \\`observerTime\\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(\\`observerTime\\`) >= 2 AND DAY_OF_WEEK(\\`observerTime\\`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(\\`observerTime\\`, 'yyyy-MM-dd')) AS \\`distinct_count\\`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=\\`events\\` | stats SUM(\\`http.response.bytes\\`) AS \\`sum_bytes\\` by \\`trace_id\\` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=\\`events\\` | patterns \\`body\\` | stats take(\\`body\\`, 1) AS \\`sample_pattern\\` by \\`patterns_field\\` | fields \\`sample_pattern\\`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=\\`events\\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns \\`body\\` | stats take(\\`body\\`, 1) AS \\`sample_pattern\\` by \\`patterns_field\\` | fields \\`sample_pattern\\`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. 
Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a \\`text\\` or \\`keyword\\` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. Similarly, if you need a date field, look for a relevant field name with type \\`date\\` and not \\`long\\`.\n#02 You must pick a field with \\`date\\` type when filtering on date/time.\n#03 You must pick a field with \\`date\\` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of \\`log\\`, \\`body\\`, \\`message\\`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where \\`timestamp\\` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where \\`timestamp\\` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where \\`timestamp\\` < '2023-01-01 00:00:00''. Do not use \\`DATE_FORMAT()\\`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(\\`\\`, )' must have type \\`date\\`, not \\`long\\`.\n#05 When aggregating by \\`SPAN\\` and another field, put \\`SPAN\\` after \\`by\\` and before the other field, eg. 
'stats COUNT() AS \\`count\\` by SPAN(\\`timestamp\\`, 1d) AS \\`span\\`, \\`category\\`'.\n#06 You must put values in quotes when filtering fields with \\`text\\` or \\`keyword\\` field type.\n#07 To find documents that contain certain phrases in string fields, use \\`QUERY_STRING\\` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. \\`integer\\`), then use 'where \\`status_code\\` >= 400'; if the field is a string (eg. \\`text\\` or \\`keyword\\`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPut your PPL query in tags.\n----------------\nQuestion: ${indexInfo.question}? index is \\`${indexInfo.indexName}\\`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", - "FINETUNE": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. Now I have a question: ${indexInfo.question} Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.indexName}\n\n### Fields:\n${indexInfo.mappingInfo}\n\n### Response:\n" + "FINETUNE": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. Now I have a question: ${indexInfo.question}. 
Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.indexName}\n\n### Fields:\n${indexInfo.mappingInfo}\n\n### Response:\n" } \ No newline at end of file diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index 8ed605a8..b3c96816 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -138,6 +138,21 @@ public void testTool() { } + @Test + public void testTool_withPreviousInput() { + PPLTool tool = PPLTool.Factory + .getInstance() + .create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "previous_tool_name", "previousTool")); + assertEquals(PPLTool.TYPE, tool.getName()); + + tool.run(ImmutableMap.of("previousTool.output", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertEquals("ppl result", returnResults.get("executionResult")); + assertEquals("source=demo| head 1", returnResults.get("ppl")); + }, e -> { log.info(e); })); + + } + @Test public void testTool_with_WithoutExecution() { PPLTool tool = PPLTool.Factory @@ -182,6 +197,23 @@ public void testTool_withPPLTag() { } + @Test + public void testTool_withDescribeStartPPL() { + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + + pplReturns = Collections.singletonMap("response", "describe demo"); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, pplReturns); + initMLTensors(); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertEquals("ppl result", returnResults.get("executionResult")); + assertEquals("describe demo", 
returnResults.get("ppl")); + }, e -> { log.info(e); })); + + } + @Test public void testTool_querySystemIndex() { PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); @@ -199,6 +231,22 @@ public void testTool_querySystemIndex() { ); } + @Test + public void testTool_queryEmptyIndex() { + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + assertEquals(PPLTool.TYPE, tool.getName()); + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> tool.run(ImmutableMap.of("question", "demo"), ActionListener.wrap(ppl -> { + assertEquals(pplResult, "ppl result"); + }, e -> { assertEquals("We cannot search system indices " + ML_CONNECTOR_INDEX, e.getMessage()); })) + ); + assertEquals( + "Return this final answer to human directly and do not use other tools: 'Please provide index name'. Please try to directly send this message to human to ask for index name", + exception.getMessage() + ); + } + @Test public void testTool_WrongModelType() { PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "model_type", "wrong_model_type")); diff --git a/src/test/java/org/opensearch/integTest/PPLToolIT.java b/src/test/java/org/opensearch/integTest/PPLToolIT.java index 837a8969..d25c6a95 100644 --- a/src/test/java/org/opensearch/integTest/PPLToolIT.java +++ b/src/test/java/org/opensearch/integTest/PPLToolIT.java @@ -108,14 +108,30 @@ public void testPPLTool_withNonExistingIndex_thenThrowException() { ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"correct\", \"index\": \"employee2\"}}") ); - MatcherAssert.assertThat(exception.getMessage(), allOf(containsString("no such index [employee2]"))); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "Return this final answer to human directly and do not use other tools: 'Please provide 
index name'. Please try to directly send this message to human to ask for index name" + ) + ) + ); } public void testPPLTool_withBlankInput_thenThrowException() { prepareIndex(); String agentId = registerAgent(); Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); - MatcherAssert.assertThat(exception.getMessage(), allOf(containsString("Parameter index and question can not be null or empty."))); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "Return this final answer to human directly and do not use other tools: 'Please provide index name'. Please try to directly send this message to human to ask for index name" + ) + ) + ); } @SneakyThrows From b8e18024d6d046b3fe4a2f3c27458540c6c251c9 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:57:10 +0800 Subject: [PATCH 052/119] trim index name in parameters for PPL tool (#185) (#186) (cherry picked from commit df90be3aed40bae85d16a33eeaf399a339fcbb4f) Signed-off-by: yuye-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 46b900a8..bcbd29ec 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -494,8 +494,7 @@ private String getIndexNameFromParameters(Map parameters) { if (!StringUtils.isBlank(this.previousToolKey) && StringUtils.isBlank(indexName)) { indexName = parameters.getOrDefault(this.previousToolKey + ".output", ""); // read index name from previous key } - return indexName; - + return indexName.trim(); } private static Map loadDefaultPromptDict() throws 
IOException { From b40255e572c81c86d2185796b8d58d5f239eb2ae Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 17:25:59 -0800 Subject: [PATCH 053/119] Fix alert constructor (#191) (#193) * Fix alert constructor * Update alert mapping * Change sample alert example --------- (cherry picked from commit e323660468e16538cd6254622ad1eb287260053f) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/SearchAlertsToolTests.java | 6 ++++-- .../agent/tools/alerting/alert_index_mappings.json | 8 ++++++++ .../org/opensearch/agent/tools/alerting/sample_alert.json | 3 ++- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java index ca1f8b99..ac1a8b3b 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAlertsToolTests.java @@ -108,7 +108,8 @@ public void testRunWithAlerts() throws Exception { Collections.emptyList(), null, null, - Collections.emptyList() + Collections.emptyList(), + null ); Alert alert2 = new Alert( "alert-id-2", @@ -135,7 +136,8 @@ public void testRunWithAlerts() throws Exception { Collections.emptyList(), null, null, - Collections.emptyList() + Collections.emptyList(), + null ); List mockAlerts = List.of(alert1, alert2); diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json b/src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json index 9d8c5ce8..c15410f5 100644 --- a/src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json +++ b/src/test/resources/org/opensearch/agent/tools/alerting/alert_index_mappings.json @@ -167,6 +167,14 @@ "type": "text" } } + }, + "clusters": { + 
"type": "text", + "fields": { + "keyword": { + "type": "keyword" + } + } } } } diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json b/src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json index 65574189..2a15c7a7 100644 --- a/src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json +++ b/src/test/resources/org/opensearch/agent/tools/alerting/sample_alert.json @@ -19,5 +19,6 @@ "start_time": 1234, "last_notification_time": 1234, "end_time": 1234, - "acknowledged_time": null + "acknowledged_time": null, + "clusters": [] } \ No newline at end of file From 1a514c6c5ef2627f0dee5716f1001e1f83b0c890 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 09:49:59 +0800 Subject: [PATCH 054/119] Increase wait time to avoid flaky test (#173) (#176) * increae wait time to avoid flaky test * add retry for setinig up connector * fix size hardcoded --------- (cherry picked from commit e9632040e47b24963a86b38a68d723d530033eeb) Signed-off-by: Hailong Cui Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../agent/tools/VisualizationsTool.java | 2 +- .../integTest/ToolIntegrationTest.java | 25 +++++++++++++------ 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java index 35bb5c1c..2fa6b996 100644 --- a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java +++ b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java @@ -77,7 +77,7 @@ public void run(Map parameters, ActionListener listener) boolQueryBuilder.must().add(QueryBuilders.matchQuery(SAVED_OBJECT_TYPE + ".title", parameters.get("input"))); SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource().query(boolQueryBuilder); - searchSourceBuilder.from(0).size(3); + 
searchSourceBuilder.from(0).size(size); SearchRequest searchRequest = Requests.searchRequest(index).source(searchSourceBuilder); client.search(searchRequest, new ActionListener<>() { diff --git a/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java b/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java index 74c3d5cf..315812b1 100644 --- a/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java +++ b/src/test/java/org/opensearch/integTest/ToolIntegrationTest.java @@ -43,13 +43,7 @@ public void setupTestAgent() throws IOException, InterruptedException { server = MockHttpServer.setupMockLLM(promptHandlers()); server.start(); clusterSettings(false); - try { - connectorId = setUpConnector(); - } catch (Exception e) { - // Wait for ML encryption master key has been initialized - TimeUnit.SECONDS.sleep(10); - connectorId = setUpConnector(); - } + connectorId = setUpConnectorWithRetry(5); modelGroupId = setupModelGroup(); modelId = setupLLMModel(connectorId, modelGroupId); // wait for model to get deployed @@ -73,6 +67,23 @@ public void deleteModel() { deleteModel(modelId); } + private String setUpConnectorWithRetry(int maxRetryTimes) throws InterruptedException { + int retryTimes = 0; + String connectorId = null; + while (retryTimes < maxRetryTimes) { + try { + connectorId = setUpConnector(); + break; + } catch (Exception e) { + // Wait for ML encryption master key has been initialized + log.info("Failed to setup connector, retry times: {}", retryTimes); + retryTimes++; + TimeUnit.SECONDS.sleep(10); + } + } + return connectorId; + } + private String setUpConnector() { String url = String.format(Locale.ROOT, "http://127.0.0.1:%d/invoke", server.getAddress().getPort()); return createConnector( From d6f6cded93ad319ebcbee8a847a3fb17702476d4 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 09:54:53 +0800 Subject: [PATCH 055/119] (PPLTool) update 
claude and openai prompts (#184) (#192) (cherry picked from commit e001987dfaf175047d4868586e4da7a00896a9c9) Signed-off-by: Joshua Li Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 5 +++-- .../org/opensearch/agent/tools/PPLDefaultPrompt.json | 7 ++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index bcbd29ec..c2820c33 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -130,7 +130,8 @@ public class PPLTool implements Tool { public enum PPLModelType { CLAUDE, - FINETUNE; + FINETUNE, + OPENAI; public static PPLModelType from(String value) { if (value.isEmpty()) { @@ -139,7 +140,7 @@ public static PPLModelType from(String value) { try { return PPLModelType.valueOf(value.toUpperCase(Locale.ROOT)); } catch (Exception e) { - log.error("Wrong PPL Model type, should be CLAUDE or FINETUNE"); + log.error("Wrong PPL Model type, should be CLAUDE, FINETUNE, or OPENAI"); return PPLModelType.CLAUDE; } } diff --git a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json index 79d19a22..885e4679 100644 --- a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json +++ b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json @@ -1,4 +1,5 @@ { - "CLAUDE": "\n\nHuman:You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=\\`\\` | where \\`\\` = '\\`\\`'\n\nHere are some sample questions and the PPL query to retrieve the information. 
The format for fields is\n\\`\\`\\`\n- field_name: field_type (sample field value)\n\\`\\`\\`\n\nFor example, below is a field called \\`timestamp\\`, it has a field type of \\`date\\`, and a sample value of it could look like \\`1686000665919\\`.\n\\`\\`\\`\n- timestamp: date (1686000665919)\n\\`\\`\\`\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=\\`accounts\\` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=\\`accounts\\` | sort +age | head 5 | fields \\`firstname\\`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=\\`accounts\\` | fields \\`address\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=\\`accounts\\` | where \\`firstname\\` = 'Hattie' OR \\`lastname\\` = 'frank' | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=\\`accounts\\` | where \\`firstname\\` != 'Hattie' AND \\`lastname\\` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=\\`accounts\\` | where QUERY_STRING(['email'], '.com') | fields \\`email\\`\n\nQuestion: Find the documents in index 'accounts' where there is 
an email\nPPL: source=\\`accounts\\` | where ISNOTNULL(\\`email\\`)\n\nQuestion: Count the number of documents in index 'accounts'\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=\\`accounts\\` | where \\`firstname\\` ='Amber' | stats COUNT() AS \\`count\\`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=\\`accounts\\` | where \\`age\\` > 33 | stats COUNT() AS \\`count\\`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=\\`accounts\\` | stats DISTINCT_COUNT(age) AS \\`distinct_count\\`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=\\`accounts\\` | stats COUNT() AS \\`count\\` BY \\`gender\\`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=\\`accounts\\` | stats AVG(\\`age\\`) AS \\`avg_age\\`, MIN(\\`age\\`) AS \\`min_age\\`, MAX(\\`age\\`) AS \\`max_age\\`\n\nQuestion: Show all states sorted by average balance. 
index is 'accounts'\nPPL: source=\\`accounts\\` | stats AVG(\\`balance\\`) AS \\`avg_balance\\` BY \\`state\\` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- 
total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'clothing') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(\\`taxful_total_price\\`) AS \\`avg_price\\` by SPAN(\\`order_date\\`, 2h) AS \\`span\\`, \\`geoip.city_name\\`\n\nQuestion: What is the total revenue of shoes each day in this week? index is 'ecommerce'\nPPL: source=\\`ecommerce\\` | where QUERY_STRING(['category'], 'shoes') AND \\`order_date\\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(\\`taxful_total_price\\`) AS \\`revenue\\` by SPAN(\\`order_date\\`, 1d) AS \\`span\\`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text 
('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=\\`events\\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND \\`observerTime\\` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 Feburary? index is 'events'\nPPL: source=\\`events\\` | where QUERY_STRING(['http.response.status_code'], '!200') AND \\`observerTime\\` >= '2023-03-01 00:00:00' AND \\`observerTime\\` < '2023-04-01 00:00:00' | stats COUNT() AS \\`count\\`\n\nQuestion: Count the number of business days that have web category logs last week? index is 'events'\nPPL: source=\\`events\\` | where \\`category\\` = 'web' AND \\`observerTime\\` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(\\`observerTime\\`) >= 2 AND DAY_OF_WEEK(\\`observerTime\\`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(\\`observerTime\\`, 'yyyy-MM-dd')) AS \\`distinct_count\\`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=\\`events\\` | stats SUM(\\`http.response.bytes\\`) AS \\`sum_bytes\\` by \\`trace_id\\` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=\\`events\\` | patterns \\`body\\` | stats take(\\`body\\`, 1) AS \\`sample_pattern\\` by \\`patterns_field\\` | fields \\`sample_pattern\\`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=\\`events\\` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns \\`body\\` | stats take(\\`body\\`, 1) AS \\`sample_pattern\\` by \\`patterns_field\\` | fields \\`sample_pattern\\`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. 
Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a \\`text\\` or \\`keyword\\` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. Similarly, if you need a date field, look for a relevant field name with type \\`date\\` and not \\`long\\`.\n#02 You must pick a field with \\`date\\` type when filtering on date/time.\n#03 You must pick a field with \\`date\\` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of \\`log\\`, \\`body\\`, \\`message\\`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where \\`timestamp\\` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where \\`timestamp\\` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where \\`timestamp\\` < '2023-01-01 00:00:00''. Do not use \\`DATE_FORMAT()\\`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(\\`\\`, )' must have type \\`date\\`, not \\`long\\`.\n#05 When aggregating by \\`SPAN\\` and another field, put \\`SPAN\\` after \\`by\\` and before the other field, eg. 
'stats COUNT() AS \\`count\\` by SPAN(\\`timestamp\\`, 1d) AS \\`span\\`, \\`category\\`'.\n#06 You must put values in quotes when filtering fields with \\`text\\` or \\`keyword\\` field type.\n#07 To find documents that contain certain phrases in string fields, use \\`QUERY_STRING\\` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. \\`integer\\`), then use 'where \\`status_code\\` >= 400'; if the field is a string (eg. \\`text\\` or \\`keyword\\`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPut your PPL query in tags.\n----------------\nQuestion: ${indexInfo.question}? index is \\`${indexInfo.indexName}\\`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", - "FINETUNE": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. Now I have a question: ${indexInfo.question}. Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.indexName}\n\n### Fields:\n${indexInfo.mappingInfo}\n\n### Response:\n" -} \ No newline at end of file + "CLAUDE": "\n\nHuman:You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=`` | where `` = '``'\n\nHere are some sample questions and the PPL query to retrieve the information. 
The format for fields is\n```\n- field_name: field_type (sample field value)\n```\n\nFor example, below is a field called `timestamp`, it has a field type of `date`, and a sample value of it could look like `1686000665919`.\n```\n- timestamp: date (1686000665919)\n```\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=`accounts` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=`accounts` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=`accounts` | sort +age | head 5 | fields `firstname`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=`accounts` | fields `address`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=`accounts` | where `firstname` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=`accounts` | where `firstname` = 'Hattie' OR `lastname` = 'frank' | fields `email`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=`accounts` | where `firstname` != 'Hattie' AND `lastname` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=`accounts` | where QUERY_STRING(['email'], '.com') | fields `email`\n\nQuestion: Find the documents in index 'accounts' where there is an email\nPPL: source=`accounts` | where ISNOTNULL(`email`)\n\nQuestion: Count the number of documents 
in index 'accounts'\nPPL: source=`accounts` | stats COUNT() AS `count`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=`accounts` | where `firstname` ='Amber' | stats COUNT() AS `count`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=`accounts` | where `age` > 33 | stats COUNT() AS `count`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=`accounts` | stats DISTINCT_COUNT(age) AS `distinct_count`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=`accounts` | stats COUNT() AS `count` BY `gender`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=`accounts` | stats AVG(`age`) AS `avg_age`, MIN(`age`) AS `min_age`, MAX(`age`) AS `max_age`\n\nQuestion: Show all states sorted by average balance. index is 'accounts'\nPPL: source=`accounts` | stats AVG(`balance`) AS `avg_balance` BY `state` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: 
half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'clothing') AND `order_date` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(`taxful_total_price`) AS `avg_price`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=`ecommerce` | where `order_date` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(`taxful_total_price`) AS `avg_price` by SPAN(`order_date`, 2h) AS `span`, `geoip.city_name`\n\nQuestion: What is the total revenue of shoes each day in this week? 
index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'shoes') AND `order_date` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(`taxful_total_price`) AS `revenue` by SPAN(`order_date`, 1d) AS `span`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text ('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND `observerTime` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 Feburary? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '!200') AND `observerTime` >= '2023-03-01 00:00:00' AND `observerTime` < '2023-04-01 00:00:00' | stats COUNT() AS `count`\n\nQuestion: Count the number of business days that have web category logs last week? 
index is 'events'\nPPL: source=`events` | where `category` = 'web' AND `observerTime` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(`observerTime`) >= 2 AND DAY_OF_WEEK(`observerTime`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(`observerTime`, 'yyyy-MM-dd')) AS `distinct_count`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=`events` | stats SUM(`http.response.bytes`) AS `sum_bytes` by `trace_id` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=`events` | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a `text` or `keyword` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. 
Similarly, if you need a date field, look for a relevant field name with type `date` and not `long`.\n#02 You must pick a field with `date` type when filtering on date/time.\n#03 You must pick a field with `date` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of `log`, `body`, `message`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where `timestamp` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where `timestamp` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where `timestamp` < '2023-01-01 00:00:00''. Do not use `DATE_FORMAT()`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(``, )' must have type `date`, not `long`.\n#05 When aggregating by `SPAN` and another field, put `SPAN` after `by` and before the other field, eg. 'stats COUNT() AS `count` by SPAN(`timestamp`, 1d) AS `span`, `category`'.\n#06 You must put values in quotes when filtering fields with `text` or `keyword` field type.\n#07 To find documents that contain certain phrases in string fields, use `QUERY_STRING` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. `integer`), then use 'where `status_code` >= 400'; if the field is a string (eg. 
`text` or `keyword`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPlease only contain PPL inside your response.\n----------------\nQuestion : ${indexInfo.question}? index is `${indexInfo.indexName}`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", + "FINETUNE": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. Now I have a question: ${indexInfo.question}. Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.indexName}\n\n### Fields:\n${indexInfo.mappingInfo}\n\n### Response:\n", + "OPENAI": "You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=`` | where `` = '``'\n\nHere are some sample questions and the PPL query to retrieve the information. 
The format for fields is\n```\n- field_name: field_type (sample field value)\n```\n\nFor example, below is a field called `timestamp`, it has a field type of `date`, and a sample value of it could look like `1686000665919`.\n```\n- timestamp: date (1686000665919)\n```\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=`accounts` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=`accounts` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=`accounts` | sort +age | head 5 | fields `firstname`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=`accounts` | fields `address`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=`accounts` | where `firstname` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=`accounts` | where `firstname` = 'Hattie' OR `lastname` = 'frank' | fields `email`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=`accounts` | where `firstname` != 'Hattie' AND `lastname` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=`accounts` | where QUERY_STRING(['email'], '.com') | fields `email`\n\nQuestion: Find the documents in index 'accounts' where there is an email\nPPL: source=`accounts` | where ISNOTNULL(`email`)\n\nQuestion: Count the number of documents 
in index 'accounts'\nPPL: source=`accounts` | stats COUNT() AS `count`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=`accounts` | where `firstname` ='Amber' | stats COUNT() AS `count`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=`accounts` | where `age` > 33 | stats COUNT() AS `count`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=`accounts` | stats DISTINCT_COUNT(age) AS `distinct_count`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=`accounts` | stats COUNT() AS `count` BY `gender`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=`accounts` | stats AVG(`age`) AS `avg_age`, MIN(`age`) AS `min_age`, MAX(`age`) AS `max_age`\n\nQuestion: Show all states sorted by average balance. index is 'accounts'\nPPL: source=`accounts` | stats AVG(`balance`) AS `avg_balance` BY `state` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: 
half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'clothing') AND `order_date` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(`taxful_total_price`) AS `avg_price`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=`ecommerce` | where `order_date` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(`taxful_total_price`) AS `avg_price` by SPAN(`order_date`, 2h) AS `span`, `geoip.city_name`\n\nQuestion: What is the total revenue of shoes each day in this week? 
index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'shoes') AND `order_date` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(`taxful_total_price`) AS `revenue` by SPAN(`order_date`, 1d) AS `span`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text ('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND `observerTime` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 Feburary? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '!200') AND `observerTime` >= '2023-03-01 00:00:00' AND `observerTime` < '2023-04-01 00:00:00' | stats COUNT() AS `count`\n\nQuestion: Count the number of business days that have web category logs last week? 
index is 'events'\nPPL: source=`events` | where `category` = 'web' AND `observerTime` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(`observerTime`) >= 2 AND DAY_OF_WEEK(`observerTime`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(`observerTime`, 'yyyy-MM-dd')) AS `distinct_count`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=`events` | stats SUM(`http.response.bytes`) AS `sum_bytes` by `trace_id` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=`events` | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a `text` or `keyword` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. 
Similarly, if you need a date field, look for a relevant field name with type `date` and not `long`.\n#02 You must pick a field with `date` type when filtering on date/time.\n#03 You must pick a field with `date` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of `log`, `body`, `message`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where `timestamp` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where `timestamp` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where `timestamp` < '2023-01-01 00:00:00''. Do not use `DATE_FORMAT()`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(``, )' must have type `date`, not `long`.\n#05 When aggregating by `SPAN` and another field, put `SPAN` after `by` and before the other field, eg. 'stats COUNT() AS `count` by SPAN(`timestamp`, 1d) AS `span`, `category`'.\n#06 You must put values in quotes when filtering fields with `text` or `keyword` field type.\n#07 To find documents that contain certain phrases in string fields, use `QUERY_STRING` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. `integer`), then use 'where `status_code` >= 400'; if the field is a string (eg. 
`text` or `keyword`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nOutput format: use xml tags to surround your PPL query, eg. source=index.\n----------------\nQuestion : ${indexInfo.question}? index is `${indexInfo.indexName}`\nFields:\n${indexInfo.mappingInfo}" +} From d87cd0933637ac6d39a7a4e257307acea6607f88 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 09:58:50 +0800 Subject: [PATCH 056/119] Fix failure ITs caused by ml-common error handling change (#195) (#196) * Fix failure ITs caused by ml-common error handling change * format code --------- (cherry picked from commit e5dc517fa1889302a0afe18072261428e07fe47c) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../integTest/NeuralSparseSearchToolIT.java | 11 ++++------- .../org/opensearch/integTest/SearchIndexToolIT.java | 2 +- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java index 58431a0a..cda051f0 100644 --- a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java +++ b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java @@ -112,7 +112,7 @@ public void testNeuralSparseSearchToolInFlowAgent() { org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("[input] is null or empty, can not process it."), containsString("illegal_argument_exception")) + allOf(containsString("[input] is null or empty, can not process it."), containsString("IllegalArgumentException")) ); } @@ -134,10 +134,7 @@ public void testNeuralSparseSearchToolInFlowAgent_withIllegalEmbeddingField_then org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf( - containsString("failed to create query: [neural_sparse] query only works on 
[rank_features] fields"), - containsString("search_phase_execution_exception") - ) + allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) ); } @@ -148,7 +145,7 @@ public void testNeuralSparseSearchToolInFlowAgent_withIllegalIndexField_thenThro org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("no such index [test_index2]"), containsString("index_not_found_exception")) + allOf(containsString("no such index [test_index2]"), containsString("IndexNotFoundException")) ); } @@ -157,6 +154,6 @@ public void testNeuralSparseSearchToolInFlowAgent_withIllegalModelIdField_thenTh Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); org.hamcrest.MatcherAssert - .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("status_exception"))); + .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("OpenSearchStatusException"))); } } diff --git a/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java b/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java index 8b7c9697..f989ebef 100644 --- a/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java @@ -131,6 +131,6 @@ public void testSearchIndexToolInFlowAgent_withIllegalQueryField_thenThrowExcept + " }\n" + "}\n"; Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, agentInput)); - MatcherAssert.assertThat(exception.getMessage(), containsString("parsing_exception")); + MatcherAssert.assertThat(exception.getMessage(), containsString("ParsingException")); } } From 1ee884ea0afee6fb5a5668219d4f92cd04722146 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 10:22:17 +0800 Subject: [PATCH 
057/119] Update mockito monorepo to v5.10.0 (#128) (#197) (cherry picked from commit cb6e97081faeba3b8939924d19c7900d9cc06b32) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 9f1f09cb..e8be4220 100644 --- a/build.gradle +++ b/build.gradle @@ -135,12 +135,12 @@ dependencies { testImplementation "org.opensearch.test:framework:${opensearch_version}" testImplementation group: 'junit', name: 'junit', version: '4.13.2' testImplementation group: 'org.json', name: 'json', version: '20231013' - testImplementation group: 'org.mockito', name: 'mockito-core', version: '5.8.0' + testImplementation group: 'org.mockito', name: 'mockito-core', version: '5.10.0' testImplementation group: 'org.mockito', name: 'mockito-inline', version: '5.2.0' testImplementation("net.bytebuddy:byte-buddy:1.14.7") testImplementation("net.bytebuddy:byte-buddy-agent:1.14.7") testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.1' - testImplementation 'org.mockito:mockito-junit-jupiter:5.8.0' + testImplementation 'org.mockito:mockito-junit-jupiter:5.10.0' testImplementation "com.nhaarman.mockitokotlin2:mockito-kotlin:2.2.0" testImplementation "com.cronutils:cron-utils:9.2.1" testImplementation "commons-validator:commons-validator:1.8.0" From 69843eb5937181c8ef3d8ecb5b0d3fa0037c5682 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 19:05:49 -0800 Subject: [PATCH 058/119] Fixed POM to include inception year (#190) (#200) (cherry picked from commit 60d9d117186a362b6bf65aa39eaa77a0c46e47ff) Signed-off-by: owaiskazi19 Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 60 
++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 47 insertions(+), 13 deletions(-) diff --git a/build.gradle b/build.gradle index e8be4220..954eecb3 100644 --- a/build.gradle +++ b/build.gradle @@ -5,6 +5,7 @@ import org.opensearch.gradle.test.RestIntegTestTask import java.util.concurrent.Callable +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { ext { @@ -125,7 +126,7 @@ dependencies { // ZipArchive dependencies used for integration tests zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${version}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${version}" - zipArchive "org.opensearch.plugin:opensearch-anomaly-detection:${version}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-anomaly-detection', version: "${version}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" zipArchive group: 'org.opensearch.plugin', name:'opensearch-knn', version: "${version}" zipArchive group: 'org.opensearch.plugin', name:'neural-search', version: "${version}" @@ -234,21 +235,44 @@ opensearchplugin { noticeFile rootProject.file("NOTICE") } -publishing { - repositories { - maven { - name = 'staging' - url = "${rootProject.buildDir}/local-staging-repo" - } - maven { - name = "Snapshots" - url = "https://aws.oss.sonatype.org/content/repositories/snapshots" - credentials { - username "$System.env.SONATYPE_USERNAME" - password "$System.env.SONATYPE_PASSWORD" +allprojects { + // Default to the apache license + project.ext.licenseName = 'The Apache Software License, Version 2.0' + project.ext.licenseUrl = 'http://www.apache.org/licenses/LICENSE-2.0.txt' + plugins.withType(ShadowPlugin).whenPluginAdded { + publishing { + repositories { + maven { + name = 'staging' + url = "${rootProject.buildDir}/local-staging-repo" + } + } + publications { + // add license information to generated poms + all { + pom { + name = 
"skills" + description = "Tools for Agent Framework" + } + pom.withXml { XmlProvider xml -> + Node node = xml.asNode() + node.appendNode('inceptionYear', '2021') + + Node license = node.appendNode('licenses').appendNode('license') + license.appendNode('name', project.licenseName) + license.appendNode('url', project.licenseUrl) + + Node developer = node.appendNode('developers').appendNode('developer') + developer.appendNode('name', 'OpenSearch') + developer.appendNode('url', 'https://github.com/opensearch-project/')skills + } + } } } } +} + +publishing { publications { pluginZip(MavenPublication) { publication -> pom { @@ -270,6 +294,16 @@ publishing { } } } + repositories { + maven { + name = "Snapshots" + url = "https://aws.oss.sonatype.org/content/repositories/snapshots" + credentials { + username "$System.env.SONATYPE_USERNAME" + password "$System.env.SONATYPE_PASSWORD" + } + } + } gradle.startParameter.setShowStacktrace(ShowStacktrace.ALWAYS) gradle.startParameter.setLogLevel(LogLevel.DEBUG) } From 59204b706546f65e2e956a7436ad1fd2383ff159 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 11:16:30 +0800 Subject: [PATCH 059/119] Fix json parsing exception for NeuralSparseSearchTool and VectorDBTool (#203) (#204) * fix the json string parsing * add it --------- (cherry picked from commit 38f5847f90f6e996dd8fbff0e0775fbbcf72c9ba) Signed-off-by: zhichao-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../agent/tools/AbstractRetrieverTool.java | 6 ++++- .../agent/tools/NeuralSparseSearchTool.java | 20 ++++++++++------- .../opensearch/agent/tools/VectorDBTool.java | 22 ++++++++++--------- .../tools/NeuralSparseSearchToolTests.java | 18 ++++++++++----- .../agent/tools/VectorDBToolTests.java | 20 ++++++++++++----- .../integTest/NeuralSparseSearchToolIT.java | 11 ++++++++++ 6 files changed, 68 insertions(+), 29 deletions(-) diff --git 
a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java index b2a0860c..5003f0fa 100644 --- a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java +++ b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java @@ -8,6 +8,8 @@ import static org.opensearch.ml.common.utils.StringUtils.gson; import java.io.IOException; +import java.security.AccessController; +import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.Map; @@ -109,7 +111,9 @@ public void run(Map parameters, ActionListener listener) StringBuilder contextBuilder = new StringBuilder(); for (SearchHit hit : hits) { Map docContent = processResponse(hit); - contextBuilder.append(gson.toJson(docContent)).append("\n"); + String docContentInString = AccessController + .doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(docContent)); + contextBuilder.append(docContentInString).append("\n"); } listener.onResponse((T) contextBuilder.toString()); } else { diff --git a/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java index 9e2ba1f7..cbe0d393 100644 --- a/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java +++ b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java @@ -7,6 +7,9 @@ import static org.opensearch.ml.common.utils.StringUtils.gson; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; import java.util.Map; import org.apache.commons.lang3.StringUtils; @@ -57,14 +60,15 @@ protected String getQueryBody(String queryText) { "Parameter [" + EMBEDDING_FIELD + "] and [" + MODEL_ID_FIELD + "] can not be null or empty." 
); } - return "{\"query\":{\"neural_sparse\":{\"" - + embeddingField - + "\":{\"query_text\":\"" - + queryText - + "\",\"model_id\":\"" - + modelId - + "\"}}}" - + " }"; + + Map queryBody = Map + .of("query", Map.of("neural_sparse", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId)))); + + try { + return AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(queryBody)); + } catch (PrivilegedActionException e) { + throw new RuntimeException(e); + } } @Override diff --git a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java index dd83cb46..4b5b41fa 100644 --- a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java +++ b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java @@ -7,6 +7,9 @@ import static org.opensearch.ml.common.utils.StringUtils.gson; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; import java.util.Map; import org.apache.commons.lang3.StringUtils; @@ -65,16 +68,15 @@ protected String getQueryBody(String queryText) { "Parameter [" + EMBEDDING_FIELD + "] and [" + MODEL_ID_FIELD + "] can not be null or empty." 
); } - return "{\"query\":{\"neural\":{\"" - + embeddingField - + "\":{\"query_text\":\"" - + queryText - + "\",\"model_id\":\"" - + modelId - + "\",\"k\":" - + k - + "}}}" - + " }"; + + Map queryBody = Map + .of("query", Map.of("neural", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId, "k", k)))); + + try { + return AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(queryBody)); + } catch (PrivilegedActionException e) { + throw new RuntimeException(e); + } } @Override diff --git a/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java b/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java index fac45f54..4491db43 100644 --- a/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java @@ -55,11 +55,19 @@ public void testCreateTool() { @SneakyThrows public void testGetQueryBody() { NeuralSparseSearchTool tool = NeuralSparseSearchTool.Factory.getInstance().create(params); - assertEquals( - "{\"query\":{\"neural_sparse\":{\"test embedding\":{\"" - + "query_text\":\"123fsd23134sdfouh\",\"model_id\":\"123fsd23134\"}}} }", - tool.getQueryBody(TEST_QUERY_TEXT) - ); + Map>>> queryBody = gson.fromJson(tool.getQueryBody(TEST_QUERY_TEXT), Map.class); + assertEquals("123fsd23134sdfouh", queryBody.get("query").get("neural_sparse").get("test embedding").get("query_text")); + assertEquals("123fsd23134", queryBody.get("query").get("neural_sparse").get("test embedding").get("model_id")); + } + + @Test + @SneakyThrows + public void testGetQueryBodyWithJsonObjectString() { + NeuralSparseSearchTool tool = NeuralSparseSearchTool.Factory.getInstance().create(params); + String jsonInput = gson.toJson(Map.of("hi", "a")); + Map>>> queryBody = gson.fromJson(tool.getQueryBody(jsonInput), Map.class); + assertEquals("{\"hi\":\"a\"}", queryBody.get("query").get("neural_sparse").get("test 
embedding").get("query_text")); + assertEquals("123fsd23134", queryBody.get("query").get("neural_sparse").get("test embedding").get("model_id")); } @Test diff --git a/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java index cce80d5b..849f9254 100644 --- a/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java @@ -55,11 +55,21 @@ public void testCreateTool() { @SneakyThrows public void testGetQueryBody() { VectorDBTool tool = VectorDBTool.Factory.getInstance().create(params); - assertEquals( - "{\"query\":{\"neural\":{\"test embedding\":{\"" - + "query_text\":\"123fsd23134sdfouh\",\"model_id\":\"123fsd23134\",\"k\":123}}} }", - tool.getQueryBody(TEST_QUERY_TEXT) - ); + Map>>> queryBody = gson.fromJson(tool.getQueryBody(TEST_QUERY_TEXT), Map.class); + assertEquals("123fsd23134sdfouh", queryBody.get("query").get("neural").get("test embedding").get("query_text")); + assertEquals("123fsd23134", queryBody.get("query").get("neural").get("test embedding").get("model_id")); + assertEquals(123.0, queryBody.get("query").get("neural").get("test embedding").get("k")); + } + + @Test + @SneakyThrows + public void testGetQueryBodyWithJsonObjectString() { + VectorDBTool tool = VectorDBTool.Factory.getInstance().create(params); + String jsonInput = gson.toJson(Map.of("hi", "a")); + Map>>> queryBody = gson.fromJson(tool.getQueryBody(jsonInput), Map.class); + assertEquals("{\"hi\":\"a\"}", queryBody.get("query").get("neural").get("test embedding").get("query_text")); + assertEquals("123fsd23134", queryBody.get("query").get("neural").get("test embedding").get("model_id")); + assertEquals(123.0, queryBody.get("query").get("neural").get("test embedding").get("k")); } @Test diff --git a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java index 
cda051f0..f6575c09 100644 --- a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java +++ b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java @@ -7,6 +7,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; +import static org.opensearch.ml.common.utils.StringUtils.gson; import java.nio.file.Files; import java.nio.file.Path; @@ -114,6 +115,16 @@ public void testNeuralSparseSearchToolInFlowAgent() { exception.getMessage(), allOf(containsString("[input] is null or empty, can not process it."), containsString("IllegalArgumentException")) ); + + // use json string input + String jsonInput = gson.toJson(Map.of("parameters", Map.of("question", gson.toJson(Map.of("hi", "a"))))); + String result3 = executeAgent(agentId, jsonInput); + assertEquals( + "The agent execute response not equal with expected.", + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 3\"},\"_id\":\"2\",\"_score\":2.4136734}\n" + + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 2\"},\"_id\":\"1\",\"_score\":1.2068367}\n", + result3 + ); } public void testNeuralSparseSearchToolInFlowAgent_withIllegalSourceField_thenGetEmptySource() { From e5c9c73985007c1e4d9f6a42913218cffae4bff6 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 16:36:57 -0800 Subject: [PATCH 060/119] Fixed build script to run assemble (#207) (#208) (cherry picked from commit 7c2ccf8aaad6552fb04cf17ab41643ffc736266a) Signed-off-by: owaiskazi19 Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- scripts/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build.sh b/scripts/build.sh index 25e5db6c..490d93fe 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -64,7 +64,7 @@ fi [[ "$SNAPSHOT" == "true" ]] && VERSION=$VERSION-SNAPSHOT [ -z "$OUTPUT" ] && OUTPUT=artifacts -./gradlew build 
-x test -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew assemble -x test -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER ./gradlew publishToMavenLocal -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER ./gradlew publishPluginZipPublicationToZipStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER mkdir -p $OUTPUT/maven/org/opensearch From 4dfdf9abde1bf194d69ecb41903596fff172d1a8 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 8 Feb 2024 20:16:49 +0800 Subject: [PATCH 061/119] add truncate For PPL Tool (#206) (#211) * add truncate * use head instead * fix typo * use head instead * fix UT --------- (cherry picked from commit 209bb1536637cba13c63058989e841d952fc302f) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/PPLTool.java | 22 +++++++++++++- .../opensearch/agent/tools/PPLToolTests.java | 30 +++++++++++++++++++ 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index c2820c33..c70bf030 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -94,6 +94,8 @@ public class PPLTool implements Tool { private String previousToolKey; + private int head; + private static Gson gson = new Gson(); private static Map DEFAULT_PROMPT_DICT; @@ -147,7 +149,15 @@ public static PPLModelType from(String value) { } - public PPLTool(Client client, String modelId, String contextPrompt, String pplModelType, String previousToolKey, boolean execute) { + public PPLTool( + Client client, + String modelId, + String contextPrompt, + 
String pplModelType, + String previousToolKey, + int head, + boolean execute + ) { this.client = client; this.modelId = modelId; this.pplModelType = PPLModelType.from(pplModelType); @@ -157,6 +167,7 @@ public PPLTool(Client client, String modelId, String contextPrompt, String pplMo this.contextPrompt = contextPrompt; } this.previousToolKey = previousToolKey; + this.head = head; this.execute = execute; } @@ -307,6 +318,7 @@ public PPLTool create(Map map) { (String) map.getOrDefault("prompt", ""), (String) map.getOrDefault("model_type", ""), (String) map.getOrDefault("previous_tool_name", ""), + Integer.valueOf((String) map.getOrDefault("head", "-1")), Boolean.valueOf((String) map.getOrDefault("execute", "true")) ); } @@ -487,6 +499,14 @@ private String parseOutput(String llmOutput, String indexName) { } ppl = ppl.replace("`", ""); ppl = ppl.replaceAll("\\bSPAN\\(", "span("); + if (this.head > 0) { + String[] lists = llmOutput.split("\\|"); + String lastCommand = lists[lists.length - 1].strip(); + if (!lastCommand.toLowerCase(Locale.ROOT).startsWith("head")) // not handle cases source=...| ... 
| head 5 | head + { + ppl = ppl + " | head " + this.head; + } + } return ppl; } diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index b3c96816..4c73817b 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -153,6 +153,36 @@ public void testTool_withPreviousInput() { } + @Test + public void testTool_withHEADButIgnore() { + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "head", "5")); + assertEquals(PPLTool.TYPE, tool.getName()); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertEquals("ppl result", returnResults.get("executionResult")); + assertEquals("source=demo| head 1", returnResults.get("ppl")); + }, e -> { log.info(e); })); + + } + + @Test + public void testTool_withHEAD() { + pplReturns = Collections.singletonMap("response", "source=demo"); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, pplReturns); + initMLTensors(); + + PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "head", "5")); + assertEquals(PPLTool.TYPE, tool.getName()); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertEquals("ppl result", returnResults.get("executionResult")); + assertEquals("source=demo | head 5", returnResults.get("ppl")); + }, e -> { log.info(e); })); + + } + @Test public void testTool_with_WithoutExecution() { PPLTool tool = PPLTool.Factory From ddf8523bec836824b2e76bd36333506318d28f5e Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" 
<98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 08:22:55 +0800 Subject: [PATCH 062/119] Added release notes for 2.12 (#213) (#214) (cherry picked from commit 7afb56850050b89bfa7df7ce9c9826b82d3dcf32) Signed-off-by: owaiskazi19 Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- release-notes/skills.release-notes-2.12.0.0.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 release-notes/skills.release-notes-2.12.0.0.md diff --git a/release-notes/skills.release-notes-2.12.0.0.md b/release-notes/skills.release-notes-2.12.0.0.md new file mode 100644 index 00000000..4c763641 --- /dev/null +++ b/release-notes/skills.release-notes-2.12.0.0.md @@ -0,0 +1,6 @@ +# 2024-02-08 Version 2.12.0.0 + +Compatible with OpenSearch 2.12.0 + +### Features +* Initial release of Skills \ No newline at end of file From 69ccd7f562abfb435b6249192d2604f93629508b Mon Sep 17 00:00:00 2001 From: Owais Kazi Date: Thu, 8 Feb 2024 16:29:11 -0800 Subject: [PATCH 063/119] Fixed build (#215) Signed-off-by: owaiskazi19 --- build.gradle | 59 ++++++++++++++++++++++++++++------------------------ 1 file changed, 32 insertions(+), 27 deletions(-) diff --git a/build.gradle b/build.gradle index 954eecb3..5f98257b 100644 --- a/build.gradle +++ b/build.gradle @@ -11,8 +11,19 @@ buildscript { ext { opensearch_group = "org.opensearch" opensearch_version = System.getProperty("opensearch.version", "2.12.0-SNAPSHOT") - isSnapshot = "true" == System.getProperty("build.snapshot", "true") buildVersionQualifier = System.getProperty("build.version_qualifier", "") + isSnapshot = "true" == System.getProperty("build.snapshot", "true") + version_tokens = opensearch_version.tokenize('-') + opensearch_build = version_tokens[0] + '.0' + plugin_no_snapshot = opensearch_build + if (buildVersionQualifier) { + opensearch_build += "-${buildVersionQualifier}" + plugin_no_snapshot += "-${buildVersionQualifier}" + } + if (isSnapshot) { + opensearch_build += 
"-SNAPSHOT" + } + opensearch_no_snapshot = opensearch_build.replace("-SNAPSHOT","") kotlin_version = System.getProperty("kotlin.version", "1.8.21") } @@ -46,14 +57,8 @@ repositories { } allprojects { - group 'org.opensearch' - version = opensearch_version.tokenize('-')[0] + '.0' - if (buildVersionQualifier) { - version += "-${buildVersionQualifier}" - } - if (isSnapshot) { - version += "-SNAPSHOT" - } + group = opensearch_group + version = "${opensearch_build}" } targetCompatibility = JavaVersion.VERSION_11 @@ -87,19 +92,19 @@ configurations { task addJarsToClasspath(type: Copy) { from(fileTree(dir: sqlJarDirectory)) { - include "opensearch-sql-${version}.jar" - include "ppl-${version}.jar" - include "protocol-${version}.jar" + include "opensearch-sql-${opensearch_build}.jar" + include "ppl-${opensearch_build}.jar" + include "protocol-${opensearch_build}.jar" } into("$buildDir/classes") from(fileTree(dir: jsJarDirectory)) { - include "opensearch-job-scheduler-${version}.jar" + include "opensearch-job-scheduler-${opensearch_build}.jar" } into("$buildDir/classes") from(fileTree(dir: adJarDirectory)) { - include "opensearch-anomaly-detection-${version}.jar" + include "opensearch-anomaly-detection-${opensearch_build}.jar" } into("$buildDir/classes") } @@ -114,23 +119,23 @@ dependencies { compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.10.0' // Plugin dependencies - compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${version}" - implementation fileTree(dir: jsJarDirectory, include: ["opensearch-job-scheduler-${version}.jar"]) - implementation fileTree(dir: adJarDirectory, include: ["opensearch-anomaly-detection-${version}.jar"]) - implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${version}.jar", "ppl-${version}.jar", "protocol-${version}.jar"]) - compileOnly "org.opensearch:common-utils:${version}" + compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${opensearch_build}" 
+ implementation fileTree(dir: jsJarDirectory, include: ["opensearch-job-scheduler-${opensearch_build}.jar"]) + implementation fileTree(dir: adJarDirectory, include: ["opensearch-anomaly-detection-${opensearch_build}.jar"]) + implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${opensearch_build}.jar", "ppl-${opensearch_build}.jar", "protocol-${opensearch_build}.jar"]) + compileOnly "org.opensearch:common-utils:${opensearch_build}" compileOnly "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" - compileOnly "org.opensearch:opensearch-job-scheduler-spi:${version}" + compileOnly "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" // ZipArchive dependencies used for integration tests - zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${version}" - zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${version}" - zipArchive group: 'org.opensearch.plugin', name:'opensearch-anomaly-detection', version: "${version}" - zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${version}" - zipArchive group: 'org.opensearch.plugin', name:'opensearch-knn', version: "${version}" - zipArchive group: 'org.opensearch.plugin', name:'neural-search', version: "${version}" - zipArchive group: 'org.opensearch.plugin', name:'alerting', version: "${version}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-ml-plugin', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-anomaly-detection', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-sql-plugin', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-knn', version: "${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'neural-search', version: 
"${opensearch_build}" + zipArchive group: 'org.opensearch.plugin', name:'alerting', version: "${opensearch_build}" // Test dependencies testImplementation "org.opensearch.test:framework:${opensearch_version}" From eb2fa1718886ff3fd4e47db2571da665b4d12498 Mon Sep 17 00:00:00 2001 From: Joshua Palis Date: Fri, 9 Feb 2024 15:41:23 -0800 Subject: [PATCH 064/119] Excludes integration test classes that attempt to delete system indices from running when security is enabled and adds security tests to CI (#224) Signed-off-by: Joshua Palis --- .github/workflows/test_security.yml | 43 +++++++++ build.gradle | 144 +++++++++++++++++++++++++++- 2 files changed, 185 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/test_security.yml diff --git a/.github/workflows/test_security.yml b/.github/workflows/test_security.yml new file mode 100644 index 00000000..2509d678 --- /dev/null +++ b/.github/workflows/test_security.yml @@ -0,0 +1,43 @@ +name: Run Security tests +on: + push: + branches-ignore: + - 'whitesource-remediate/**' + - 'backport/**' + pull_request: + types: [opened, synchronize, reopened] + +jobs: + Get-CI-Image-Tag: + uses: opensearch-project/opensearch-build/.github/workflows/get-ci-image-tag.yml@main + with: + product: opensearch + + integ-test-with-security-linux: + strategy: + matrix: + java: [11, 17, 21] + + name: Run Security Integration Tests on Linux + runs-on: ubuntu-latest + needs: Get-CI-Image-Tag + container: + # using the same image which is used by opensearch-build team to build the OpenSearch Distribution + # this image tag is subject to change as more dependencies and updates will arrive over time + image: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-version-linux }} + # need to switch to root so that github actions can install runner binary on container without permission issues. 
+ options: --user root + + steps: + - name: Checkout Skills + uses: actions/checkout@v3 + - name: Setup Java ${{ matrix.java }} + uses: actions/setup-java@v3 + with: + distribution: 'temurin' + java-version: ${{ matrix.java }} + - name: Run tests + # switching the user, as OpenSearch cluster can only be started as root/Administrator on linux-deb/linux-rpm/windows-zip. + run: | + chown -R 1000:1000 `pwd` + su `id -un 1000` -c "whoami && java -version && ./gradlew integTest -Dsecurity.enabled=true" \ No newline at end of file diff --git a/build.gradle b/build.gradle index 5f98257b..57711916 100644 --- a/build.gradle +++ b/build.gradle @@ -5,6 +5,8 @@ import org.opensearch.gradle.test.RestIntegTestTask import java.util.concurrent.Callable +import org.opensearch.gradle.testclusters.OpenSearchCluster +import java.nio.file.Paths import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { @@ -43,6 +45,7 @@ plugins { id 'java-library' id 'com.diffplug.spotless' version '6.23.0' id "io.freefair.lombok" version "8.4" + id "de.undercouch.download" version "5.3.0" } lombok { @@ -81,6 +84,7 @@ def adJarDirectory = "$buildDir/dependencies/opensearch-anomaly-detection" configurations { zipArchive + secureIntegTestPluginArchive all { resolutionStrategy { force "org.mockito:mockito-core:${versions.mockito}" @@ -136,6 +140,7 @@ dependencies { zipArchive group: 'org.opensearch.plugin', name:'opensearch-knn', version: "${opensearch_build}" zipArchive group: 'org.opensearch.plugin', name:'neural-search', version: "${opensearch_build}" zipArchive group: 'org.opensearch.plugin', name:'alerting', version: "${opensearch_build}" + secureIntegTestPluginArchive group: 'org.opensearch.plugin', name:'opensearch-security', version: "${opensearch_build}" // Test dependencies testImplementation "org.opensearch.test:framework:${opensearch_version}" @@ -240,6 +245,14 @@ opensearchplugin { noticeFile rootProject.file("NOTICE") } +def opensearch_tmp_dir = 
rootProject.file('build/private/opensearch_tmp').absoluteFile +opensearch_tmp_dir.mkdirs() + +ext { + projectSubstitutions = [:] + isSnapshot = "true" == System.getProperty("build.snapshot", "true") +} + allprojects { // Default to the apache license project.ext.licenseName = 'The Apache Software License, Version 2.0' @@ -313,8 +326,7 @@ publishing { gradle.startParameter.setLogLevel(LogLevel.DEBUG) } -def opensearch_tmp_dir = rootProject.file('build/private/opensearch_tmp').absoluteFile -opensearch_tmp_dir.mkdirs() + def _numNodes = findProperty('numNodes') as Integer ?: 1 // Set up integration tests @@ -335,6 +347,35 @@ integTest { systemProperty "user", System.getProperty("user") systemProperty "password", System.getProperty("password") + systemProperty 'security.enabled', System.getProperty('security.enabled') + var is_https = System.getProperty("https") + var user = System.getProperty("user") + var password = System.getProperty("password") + + if (System.getProperty("security.enabled") != null) { + // If security is enabled, set is_https/user/password defaults + // admin password is permissable here since the security plugin is manually configured using the default internal_users.yml configuration + is_https = is_https == null ? "true" : is_https + user = user == null ? "admin" : user + password = password == null ? 
"admin" : password + System.setProperty("https", is_https) + System.setProperty("user", user) + System.setProperty("password", password) + } + + systemProperty("https", is_https) + systemProperty("user", user) + systemProperty("password", password) + + if (System.getProperty("https") != null && System.getProperty("https") == "true") { + filter { + excludeTestsMatching "org.opensearch.integTest.SearchAlertsToolIT" + excludeTestsMatching "org.opensearch.integTest.SearchAnomalyDetectorsToolIT" + excludeTestsMatching "org.opensearch.integTest.SearchAnomalyResultsToolIT" + excludeTestsMatching "org.opensearch.integTest.SearchMonitorsToolIT" + } + } + // doFirst delays this block until execution time doFirst { @@ -361,6 +402,96 @@ integTest { testClusters.integTest { testDistribution = "ARCHIVE" + // Optionally install security + if (System.getProperty("security.enabled") != null) { + configurations.secureIntegTestPluginArchive.asFileTree.each { + if(it.name.contains("opensearch-security")){ + plugin(provider(new Callable() { + @Override + RegularFile call() throws Exception { + return new RegularFile() { + @Override + File getAsFile() { + return it + } + } + } + })) + } + } + + getNodes().forEach { node -> + var creds = node.getCredentials() + // admin password is permissable here since the security plugin is manually configured using the default internal_users.yml configuration + if (creds.isEmpty()) { + creds.add(Map.of('username', 'admin', 'password', 'admin')) + } else { + creds.get(0).putAll(Map.of('username', 'admin', 'password', 'admin')) + } + } + + // Config below including files are copied from security demo configuration + ['esnode.pem', 'esnode-key.pem', 'root-ca.pem','kirk.pem','kirk-key.pem'].forEach { file -> + File local = Paths.get(opensearch_tmp_dir.absolutePath, file).toFile() + download.run { + src "https://raw.githubusercontent.com/opensearch-project/security/main/bwc-test/src/test/resources/security/" + file + dest local + overwrite false + } + } + 
+ // // Config below including files are copied from security demo configuration + extraConfigFile("esnode.pem", file("$opensearch_tmp_dir/esnode.pem")) + extraConfigFile("esnode-key.pem", file("$opensearch_tmp_dir/esnode-key.pem")) + extraConfigFile("root-ca.pem", file("$opensearch_tmp_dir/root-ca.pem")) + + // This configuration is copied from the security plugins demo install: + // https://github.com/opensearch-project/security/blob/2.11.1.0/tools/install_demo_configuration.sh#L365-L388 + setting("plugins.security.ssl.transport.pemcert_filepath", "esnode.pem") + setting("plugins.security.ssl.transport.pemkey_filepath", "esnode-key.pem") + setting("plugins.security.ssl.transport.pemtrustedcas_filepath", "root-ca.pem") + setting("plugins.security.ssl.transport.enforce_hostname_verification", "false") + setting("plugins.security.ssl.http.enabled", "true") + setting("plugins.security.ssl.http.pemcert_filepath", "esnode.pem") + setting("plugins.security.ssl.http.pemkey_filepath", "esnode-key.pem") + setting("plugins.security.ssl.http.pemtrustedcas_filepath", "root-ca.pem") + setting("plugins.security.allow_unsafe_democertificates", "true") + setting("plugins.security.allow_default_init_securityindex", "true") + setting("plugins.security.unsupported.inject_user.enabled", "true") + + setting("plugins.security.authcz.admin_dn", "\n- CN=kirk,OU=client,O=client,L=test, C=de") + setting('plugins.security.restapi.roles_enabled', '["all_access", "security_rest_api_access"]') + setting('plugins.security.system_indices.enabled', "true") + setting('plugins.security.system_indices.indices', '[' + + '".plugins-ml-config", ' + + '".plugins-ml-connector", ' + + '".plugins-ml-model-group", ' + + '".plugins-ml-model", ".plugins-ml-task", ' + + '".plugins-ml-conversation-meta", ' + + '".plugins-ml-conversation-interactions", ' + + '".opendistro-alerting-config", ' + + '".opendistro-alerting-alert*", ' + + '".opendistro-anomaly-results*", ' + + '".opendistro-anomaly-detector*", ' + + 
'".opendistro-anomaly-checkpoints", ' + + '".opendistro-anomaly-detection-state", ' + + '".opendistro-reports-*", ' + + '".opensearch-notifications-*", ' + + '".opensearch-notebooks", ' + + '".opensearch-observability", ' + + '".ql-datasources", ' + + '".opendistro-asynchronous-search-response*", ' + + '".replication-metadata-store", ' + + '".opensearch-knn-models", ' + + '".geospatial-ip2geo-data*", ' + + '".plugins-flow-framework-config", ' + + '".plugins-flow-framework-templates", ' + + '".plugins-flow-framework-state"' + + ']' + ) + setSecure(true) + } + // Installs all registered zipArchive dependencies on integTest cluster nodes configurations.zipArchive.asFileTree.each { plugin(provider(new Callable(){ @@ -412,6 +543,15 @@ task integTestRemote(type: RestIntegTestTask) { includeTestsMatching "org.opensearch.integTest.*IT" } } + + if (System.getProperty("https") != null && System.getProperty("https") == "true") { + filter { + excludeTestsMatching "org.opensearch.integTest.SearchAlertsToolIT" + excludeTestsMatching "org.opensearch.integTest.SearchAnomalyDetectorsToolIT" + excludeTestsMatching "org.opensearch.integTest.SearchAnomalyResultsToolIT" + excludeTestsMatching "org.opensearch.integTest.SearchMonitorsToolIT" + } + } } // Automatically sets up the integration test cluster locally From 706fa85e4661f82d2334d101f42ca2725c5b7e5d Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:29:33 -0800 Subject: [PATCH 065/119] Onboard search AD and search monitor tools to security IT (#225) (#226) (cherry picked from commit 4cecb3d9b20b19832a6fdbf86c2b57584447690f) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- build.gradle | 8 +- .../integTest/BaseAgentToolsIT.java | 17 ++ .../SearchAnomalyDetectorsToolIT.java | 38 ++--- .../integTest/SearchMonitorsToolIT.java | 42 +++-- .../detectors_index_mappings.json | 157 
------------------ 5 files changed, 61 insertions(+), 201 deletions(-) delete mode 100644 src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json diff --git a/build.gradle b/build.gradle index 57711916..2504c8d8 100644 --- a/build.gradle +++ b/build.gradle @@ -367,12 +367,12 @@ integTest { systemProperty("user", user) systemProperty("password", password) + // Certain integ tests require system index manipulation to properly test. We exclude those + // in the security-enabled scenario since this action is prohibited by security plugin. if (System.getProperty("https") != null && System.getProperty("https") == "true") { filter { excludeTestsMatching "org.opensearch.integTest.SearchAlertsToolIT" - excludeTestsMatching "org.opensearch.integTest.SearchAnomalyDetectorsToolIT" excludeTestsMatching "org.opensearch.integTest.SearchAnomalyResultsToolIT" - excludeTestsMatching "org.opensearch.integTest.SearchMonitorsToolIT" } } @@ -544,12 +544,12 @@ task integTestRemote(type: RestIntegTestTask) { } } + // Certain integ tests require system index manipulation to properly test. We exclude those + // in the security-enabled scenario since this action is prohibited by security plugin. 
if (System.getProperty("https") != null && System.getProperty("https") == "true") { filter { excludeTestsMatching "org.opensearch.integTest.SearchAlertsToolIT" - excludeTestsMatching "org.opensearch.integTest.SearchAnomalyDetectorsToolIT" excludeTestsMatching "org.opensearch.integTest.SearchAnomalyResultsToolIT" - excludeTestsMatching "org.opensearch.integTest.SearchMonitorsToolIT" } } } diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index cc7b2702..bb17b30f 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -126,6 +126,11 @@ protected String indexMonitor(String monitorAsJsonString) { return parseFieldFromResponse(response, "_id").toString(); } + protected void deleteMonitor(String monitorId) { + Response response = makeRequest(client(), "DELETE", "_plugins/_alerting/monitors/" + monitorId, null, (String) null, null); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + protected String indexDetector(String detectorAsJsonString) { Response response = makeRequest(client(), "POST", "_plugins/_anomaly_detection/detectors", null, detectorAsJsonString, null); @@ -133,6 +138,18 @@ protected String indexDetector(String detectorAsJsonString) { return parseFieldFromResponse(response, "_id").toString(); } + protected void deleteDetector(String detectorId) { + Response response = makeRequest( + client(), + "DELETE", + "_plugins/_anomaly_detection/detectors/" + detectorId, + null, + (String) null, + null + ); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + @SneakyThrows protected Map waitResponseMeetingCondition( String method, diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java index 
336d412a..750b2f48 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java @@ -11,8 +11,9 @@ import org.junit.After; import org.junit.Before; -import org.junit.jupiter.api.BeforeEach; -import org.opensearch.agent.tools.utils.ToolConstants; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.TestMethodOrder; import com.google.gson.Gson; import com.google.gson.JsonArray; @@ -20,16 +21,14 @@ import lombok.SneakyThrows; +@TestMethodOrder(OrderAnnotation.class) public class SearchAnomalyDetectorsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; - private String detectorsIndexMappings; private String sampleDetector; private String sampleIndexMappings; private static final String detectorName = "foo-name"; private static final String registerAgentFilepath = "org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json"; - private static final String detectorsIndexMappingsFilepath = - "org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json"; private static final String sampleDetectorFilepath = "org/opensearch/agent/tools/anomaly-detection/sample_detector.json"; private static final String sampleIndexMappingsFilepath = "org/opensearch/agent/tools/anomaly-detection/sample_index_mappings.json"; @@ -38,27 +37,19 @@ public class SearchAnomalyDetectorsToolIT extends BaseAgentToolsIT { public void setUp() { super.setUp(); registerAgentRequestBody = Files.readString(Path.of(this.getClass().getClassLoader().getResource(registerAgentFilepath).toURI())); - detectorsIndexMappings = Files - .readString(Path.of(this.getClass().getClassLoader().getResource(detectorsIndexMappingsFilepath).toURI())); sampleDetector = 
Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleDetectorFilepath).toURI())); sampleIndexMappings = Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleIndexMappingsFilepath).toURI())); } - @BeforeEach - @SneakyThrows - public void prepareTest() { - deleteSystemIndices(); - } - @After @SneakyThrows public void tearDown() { super.tearDown(); deleteExternalIndices(); - deleteSystemIndices(); } @SneakyThrows + @Order(1) public void testSearchAnomalyDetectorsToolInFlowAgent_withNoSystemIndex() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; @@ -67,19 +58,20 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_withNoSystemIndex() { } @SneakyThrows + @Order(2) public void testSearchAnomalyDetectorsToolInFlowAgent_noMatching() { - setupADSystemIndices(); setupTestDetectionIndex("test-index"); - ingestSampleDetector(detectorName, "test-index"); + String detectorId = ingestSampleDetector(detectorName, "test-index"); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "foo" + "\"}}"; String result = executeAgent(agentId, agentInput); assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); + deleteDetector(detectorId); } @SneakyThrows + @Order(3) public void testSearchAnomalyDetectorsToolInFlowAgent_matching() { - setupADSystemIndices(); setupTestDetectionIndex("test-index"); String detectorId = ingestSampleDetector(detectorName, "test-index"); String agentId = createAgent(registerAgentRequestBody); @@ -88,14 +80,15 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_matching() { assertTrue(result.contains(String.format("id=%s", detectorId))); assertTrue(result.contains(String.format("name=%s", detectorName))); assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); + deleteDetector(detectorId); } @SneakyThrows + 
@Order(4) public void testSearchAnomalyDetectorsToolInFlowAgent_complexParams() { - setupADSystemIndices(); setupTestDetectionIndex("test-index"); String detectorId = ingestSampleDetector(detectorName, "test-index"); - ingestSampleDetector(detectorName + "foo", "test-index"); + String detectorIdFoo = ingestSampleDetector(detectorName + "foo", "test-index"); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName @@ -104,11 +97,8 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_complexParams() { assertTrue(result.contains(String.format("id=%s", detectorId))); assertTrue(result.contains(String.format("name=%s", detectorName))); assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); - } - - @SneakyThrows - private void setupADSystemIndices() { - createIndexWithConfiguration(ToolConstants.AD_DETECTORS_INDEX, detectorsIndexMappings); + deleteDetector(detectorId); + deleteDetector(detectorIdFoo); } @SneakyThrows diff --git a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java index e14fbca8..cd3eca09 100644 --- a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java @@ -12,7 +12,9 @@ import org.junit.After; import org.junit.Before; -import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.TestMethodOrder; import com.google.gson.Gson; import com.google.gson.JsonObject; @@ -21,6 +23,7 @@ import lombok.extern.log4j.Log4j2; @Log4j2 +@TestMethodOrder(OrderAnnotation.class) public class SearchMonitorsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; private String sampleMonitor; @@ -38,21 +41,15 @@ public void setUp() { sampleMonitor = 
Files.readString(Path.of(this.getClass().getClassLoader().getResource(sampleMonitorFilepath).toURI())); } - @BeforeEach - @SneakyThrows - public void prepareTest() { - deleteSystemIndices(); - } - @After @SneakyThrows public void tearDown() { super.tearDown(); deleteExternalIndices(); - deleteSystemIndices(); } @SneakyThrows + @Order(1) public void testSearchMonitorsToolInFlowAgent_withNoSystemIndex() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorName\": \"" + monitorName + "\"}}"; @@ -61,29 +58,33 @@ public void testSearchMonitorsToolInFlowAgent_withNoSystemIndex() { } @SneakyThrows + @Order(2) public void testSearchMonitorsToolInFlowAgent_searchById() { String monitorId = ingestSampleMonitor(monitorName, true); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "\"}}"; - String result = executeAgent(agentId, agentInput); assertTrue(result.contains(String.format("name=%s", monitorName))); assertTrue(result.contains("TotalMonitors=1")); + deleteMonitor(monitorId); } @SneakyThrows + @Order(3) public void testSearchMonitorsToolInFlowAgent_singleMonitor_noFilter() { - ingestSampleMonitor(monitorName, true); + String monitorId = ingestSampleMonitor(monitorName, true); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{}}"; String result = executeAgent(agentId, agentInput); assertTrue(result.contains(String.format("name=%s", monitorName))); assertTrue(result.contains("TotalMonitors=1")); + deleteMonitor(monitorId); } @SneakyThrows + @Order(4) public void testSearchMonitorsToolInFlowAgent_singleMonitor_filter() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorId\": \"" + "foo-id" + "\"}}"; @@ -92,9 +93,10 @@ public void testSearchMonitorsToolInFlowAgent_singleMonitor_filter() { } @SneakyThrows + @Order(5) public void 
testSearchMonitorsToolInFlowAgent_multipleMonitors_noFilter() { - ingestSampleMonitor(monitorName, true); - ingestSampleMonitor(monitorName2, false); + String monitorId1 = ingestSampleMonitor(monitorName, true); + String monitorId2 = ingestSampleMonitor(monitorName2, false); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{}}"; @@ -104,12 +106,15 @@ public void testSearchMonitorsToolInFlowAgent_multipleMonitors_noFilter() { assertTrue(result.contains("enabled=true")); assertTrue(result.contains("enabled=false")); assertTrue(result.contains("TotalMonitors=2")); + deleteMonitor(monitorId1); + deleteMonitor(monitorId2); } @SneakyThrows + @Order(6) public void testSearchMonitorsToolInFlowAgent_multipleMonitors_filter() { - ingestSampleMonitor(monitorName, true); - ingestSampleMonitor(monitorName2, false); + String monitorId1 = ingestSampleMonitor(monitorName, true); + String monitorId2 = ingestSampleMonitor(monitorName2, false); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorName\": \"" + monitorName + "\"}}"; @@ -118,12 +123,15 @@ public void testSearchMonitorsToolInFlowAgent_multipleMonitors_filter() { assertFalse(result.contains(String.format("name=%s", monitorName2))); assertTrue(result.contains("enabled=true")); assertTrue(result.contains("TotalMonitors=1")); + deleteMonitor(monitorId1); + deleteMonitor(monitorId2); } @SneakyThrows + @Order(7) public void testSearchMonitorsToolInFlowAgent_multipleMonitors_complexParams() { - ingestSampleMonitor(monitorName, true); - ingestSampleMonitor(monitorName2, false); + String monitorId1 = ingestSampleMonitor(monitorName, true); + String monitorId2 = ingestSampleMonitor(monitorName2, false); String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorName\": \"" @@ -131,6 +139,8 @@ public void testSearchMonitorsToolInFlowAgent_multipleMonitors_complexParams() { + "\", \"enabled\": 
true, \"hasTriggers\": false, \"sortOrder\": \"asc\", \"sortString\": \"monitor.name.keyword\", \"size\": 10, \"startIndex\": 0 }}"; String result = executeAgent(agentId, agentInput); assertTrue(result.contains("TotalMonitors=1")); + deleteMonitor(monitorId1); + deleteMonitor(monitorId2); } private String ingestSampleMonitor(String monitorName, boolean enabled) { diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json deleted file mode 100644 index 561f30ce..00000000 --- a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/detectors_index_mappings.json +++ /dev/null @@ -1,157 +0,0 @@ -{ - "mappings": { - "dynamic": "false", - "_meta": { - "schema_version": 5 - }, - "properties": { - "category_field": { - "type": "keyword" - }, - "description": { - "type": "text" - }, - "detection_interval": { - "properties": { - "period": { - "properties": { - "interval": { - "type": "integer" - }, - "unit": { - "type": "keyword" - } - } - } - } - }, - "detector_type": { - "type": "keyword" - }, - "feature_attributes": { - "type": "nested", - "properties": { - "aggregation_query": { - "type": "object", - "enabled": false - }, - "feature_enabled": { - "type": "boolean" - }, - "feature_id": { - "type": "keyword", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "feature_name": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - } - } - }, - "filter_query": { - "type": "object", - "enabled": false - }, - "indices": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "last_update_time": { - "type": "date", - "format": "strict_date_time||epoch_millis" - }, - "name": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "result_index": { 
- "type": "keyword" - }, - "schema_version": { - "type": "integer" - }, - "shingle_size": { - "type": "integer" - }, - "time_field": { - "type": "keyword" - }, - "ui_metadata": { - "type": "object", - "enabled": false - }, - "user": { - "type": "nested", - "properties": { - "backend_roles": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword" - } - } - }, - "custom_attribute_names": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword" - } - } - }, - "name": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "roles": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword" - } - } - } - } - }, - "window_delay": { - "properties": { - "period": { - "properties": { - "interval": { - "type": "integer" - }, - "unit": { - "type": "keyword" - } - } - } - } - } - } - } -} \ No newline at end of file From d1877c9db6644abd65e28d782ac2429450d3e645 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 15:14:32 -0800 Subject: [PATCH 066/119] fix VisualizationsToolIT (#229) (#230) (cherry picked from commit 919f45525f1adfc9439db29afcdd3035731c8933) Signed-off-by: Jing Zhang Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/test/java/org/opensearch/integTest/PromptHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/opensearch/integTest/PromptHandler.java b/src/test/java/org/opensearch/integTest/PromptHandler.java index a3f9314d..0ef03501 100644 --- a/src/test/java/org/opensearch/integTest/PromptHandler.java +++ b/src/test/java/org/opensearch/integTest/PromptHandler.java @@ -23,7 +23,7 @@ LLMThought llmThought() { } String response(String prompt) { - if (prompt.contains("TOOL RESPONSE: ")) { + if (prompt.contains("Human: TOOL RESPONSE ")) { return "```json{\n" + " \"thought\": \"Thought: Now I know the final 
answer\",\n" + " \"final_answer\": \"final answer\"\n" From 0d4bdd9606310e55554bbece4d7e54dbf5119382 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 17:30:49 -0800 Subject: [PATCH 067/119] Fix SearchAnomalyDetectorsTool indices param bug; add more IT (#233) (#234) (cherry picked from commit bd510a5e928840cc1e81b5a5f27a3ef3486ce486) Signed-off-by: Tyler Ohlsen Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../tools/SearchAnomalyDetectorsTool.java | 2 +- .../SearchAnomalyDetectorsToolIT.java | 142 ++++++++++++++---- ...nt_of_search_alerts_tool_request_body.json | 3 +- ..._of_search_monitors_tool_request_body.json | 3 +- ...h_anomaly_detectors_tool_request_body.json | 3 +- ...rch_anomaly_results_tool_request_body.json | 3 +- 6 files changed, 116 insertions(+), 40 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index a94b92f6..b3ef29e1 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -116,7 +116,7 @@ public void run(Map parameters, ActionListener listener) mustList.add(new WildcardQueryBuilder("name.keyword", detectorNamePattern)); } if (indices != null) { - mustList.add(new TermQueryBuilder("indices", indices)); + mustList.add(new TermQueryBuilder("indices.keyword", indices)); } if (highCardinality != null) { mustList.add(new TermQueryBuilder("detector_type", highCardinality ? 
"MULTI_ENTITY" : "SINGLE_ENTITY")); diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java index 750b2f48..2390a108 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java @@ -59,46 +59,126 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_withNoSystemIndex() { @SneakyThrows @Order(2) - public void testSearchAnomalyDetectorsToolInFlowAgent_noMatching() { - setupTestDetectionIndex("test-index"); - String detectorId = ingestSampleDetector(detectorName, "test-index"); - String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "foo" + "\"}}"; - String result = executeAgent(agentId, agentInput); - assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); - deleteDetector(detectorId); + public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNameParam() { + String detectorId = ""; + try { + setupTestDetectionIndex("test-index"); + detectorId = ingestSampleDetector(detectorName, "test-index"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "foo" + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); + + String agentInput2 = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; + String result2 = executeAgent(agentId, agentInput2); + assertTrue(result2.contains(String.format("id=%s", detectorId))); + assertTrue(result2.contains(String.format("name=%s", detectorName))); + assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + } finally { + if (detectorId != null) { + deleteDetector(detectorId); + } + } } @SneakyThrows @Order(3) - public void 
testSearchAnomalyDetectorsToolInFlowAgent_matching() { - setupTestDetectionIndex("test-index"); - String detectorId = ingestSampleDetector(detectorName, "test-index"); - String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; - String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("id=%s", detectorId))); - assertTrue(result.contains(String.format("name=%s", detectorName))); - assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); - deleteDetector(detectorId); + public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNamePatternParam() { + String detectorId = ""; + try { + setupTestDetectionIndex("test-index"); + detectorId = ingestSampleDetector(detectorName, "test-index"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorNamePattern\": \"" + detectorName + "foo" + "\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); + + String agentInput2 = "{\"parameters\":{\"detectorNamePattern\": \"" + detectorName + "*" + "\"}}"; + String result2 = executeAgent(agentId, agentInput2); + assertTrue(result2.contains(String.format("id=%s", detectorId))); + assertTrue(result2.contains(String.format("name=%s", detectorName))); + assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + } finally { + if (detectorId != null) { + deleteDetector(detectorId); + } + } + } @SneakyThrows @Order(4) + public void testSearchAnomalyDetectorsToolInFlowAgent_indicesParam() { + String detectorId = ""; + try { + setupTestDetectionIndex("test-index"); + detectorId = ingestSampleDetector(detectorName, "test-index"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"indices\": \"test-index-foo\"}}"; + String result = executeAgent(agentId, 
agentInput); + assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); + + String agentInput2 = "{\"parameters\":{\"indices\": \"test-index\"}}"; + String result2 = executeAgent(agentId, agentInput2); + assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + } finally { + if (detectorId != null) { + deleteDetector(detectorId); + } + } + + } + + @SneakyThrows + @Order(5) + public void testSearchAnomalyDetectorsToolInFlowAgent_highCardinalityParam() { + String detectorId = ""; + try { + setupTestDetectionIndex("test-index"); + detectorId = ingestSampleDetector(detectorName, "test-index"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"highCardinality\": \"true\"}}"; + String result = executeAgent(agentId, agentInput); + assertEquals("AnomalyDetectors=[]TotalAnomalyDetectors=0", result); + + String agentInput2 = "{\"parameters\":{\"highCardinality\": \"false\"}}"; + String result2 = executeAgent(agentId, agentInput2); + assertTrue(result2.contains(String.format("id=%s", detectorId))); + assertTrue(result2.contains(String.format("name=%s", detectorName))); + assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + } finally { + if (detectorId != null) { + deleteDetector(detectorId); + } + } + + } + + @SneakyThrows + @Order(6) public void testSearchAnomalyDetectorsToolInFlowAgent_complexParams() { - setupTestDetectionIndex("test-index"); - String detectorId = ingestSampleDetector(detectorName, "test-index"); - String detectorIdFoo = ingestSampleDetector(detectorName + "foo", "test-index"); - String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\"parameters\":{\"detectorName\": \"" - + detectorName - + "\", \"highCardinality\": false, \"sortOrder\": \"asc\", \"sortString\": \"name.keyword\", \"size\": 10, \"startIndex\": 0 }}"; - String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("id=%s", 
detectorId))); - assertTrue(result.contains(String.format("name=%s", detectorName))); - assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); - deleteDetector(detectorId); - deleteDetector(detectorIdFoo); + String detectorId = null; + String detectorIdFoo = null; + try { + setupTestDetectionIndex("test-index"); + detectorId = ingestSampleDetector(detectorName, "test-index"); + detectorIdFoo = ingestSampleDetector(detectorName + "foo", "test-index"); + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"detectorName\": \"" + + detectorName + + "\", \"highCardinality\": false, \"sortOrder\": \"asc\", \"sortString\": \"name.keyword\", \"size\": 10, \"startIndex\": 0 }}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains(String.format("id=%s", detectorId))); + assertTrue(result.contains(String.format("name=%s", detectorName))); + assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); + } finally { + if (detectorId != null) { + deleteDetector(detectorId); + } + if (detectorIdFoo != null) { + deleteDetector(detectorIdFoo); + } + } } @SneakyThrows diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_alerts_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_alerts_tool_request_body.json index 80d1146b..71abfc78 100644 --- a/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_alerts_tool_request_body.json +++ b/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_alerts_tool_request_body.json @@ -3,8 +3,7 @@ "type": "flow", "tools": [ { - "type": "SearchAlertsTool", - "description": "Use this tool to search alerts." 
+ "type": "SearchAlertsTool" } ] } \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_monitors_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_monitors_tool_request_body.json index d38d955f..200203fd 100644 --- a/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_monitors_tool_request_body.json +++ b/src/test/resources/org/opensearch/agent/tools/alerting/register_flow_agent_of_search_monitors_tool_request_body.json @@ -3,8 +3,7 @@ "type": "flow", "tools": [ { - "type": "SearchMonitorsTool", - "description": "Use this tool to search alerting monitors." + "type": "SearchMonitorsTool" } ] } \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json index fc362709..b65eb44e 100644 --- a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json +++ b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_detectors_tool_request_body.json @@ -3,8 +3,7 @@ "type": "flow", "tools": [ { - "type": "SearchAnomalyDetectorsTool", - "description": "Use this tool to search anomaly detectors." 
+ "type": "SearchAnomalyDetectorsTool" } ] } \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_results_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_results_tool_request_body.json index 710a2518..03e2f354 100644 --- a/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_results_tool_request_body.json +++ b/src/test/resources/org/opensearch/agent/tools/anomaly-detection/register_flow_agent_of_search_anomaly_results_tool_request_body.json @@ -3,8 +3,7 @@ "type": "flow", "tools": [ { - "type": "SearchAnomalyResultsTool", - "description": "Use this tool to search anomaly results." + "type": "SearchAnomalyResultsTool" } ] } \ No newline at end of file From f3f2317c70ad92e51b46af3fe9b967d03164a739 Mon Sep 17 00:00:00 2001 From: Tyler Ohlsen Date: Mon, 26 Feb 2024 09:05:39 -0800 Subject: [PATCH 068/119] [Backport 2.x] Fix detector state params in SearchAnomalyDetectorsTool (#235) (#238) Signed-off-by: Tyler Ohlsen --- .../java/org/opensearch/agent/ToolPlugin.java | 4 +- .../tools/SearchAnomalyDetectorsTool.java | 51 ++++++++---- .../agent/tools/SearchAnomalyResultsTool.java | 14 ++-- .../org/opensearch/agent/TestHelpers.java | 6 +- .../SearchAnomalyDetectorsToolTests.java | 44 +++++----- .../tools/SearchAnomalyResultsToolTests.java | 14 +--- .../integTest/BaseAgentToolsIT.java | 24 ++++++ .../SearchAnomalyDetectorsToolIT.java | 83 ++++++++++++++++++- .../integTest/SearchAnomalyResultsToolIT.java | 23 +++-- 9 files changed, 187 insertions(+), 76 deletions(-) diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index e78b2550..1db7915e 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -69,8 +69,8 @@ public Collection 
createComponents( SearchIndexTool.Factory.getInstance().init(client, xContentRegistry); RAGTool.Factory.getInstance().init(client, xContentRegistry); SearchAlertsTool.Factory.getInstance().init(client); - SearchAnomalyDetectorsTool.Factory.getInstance().init(client); - SearchAnomalyResultsTool.Factory.getInstance().init(client); + SearchAnomalyDetectorsTool.Factory.getInstance().init(client, namedWriteableRegistry); + SearchAnomalyResultsTool.Factory.getInstance().init(client, namedWriteableRegistry); SearchMonitorsTool.Factory.getInstance().init(client); return Collections.emptyList(); } diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index b3ef29e1..e4a8e89d 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -8,6 +8,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; @@ -26,6 +27,7 @@ import org.opensearch.client.Client; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -49,7 +51,7 @@ public class SearchAnomalyDetectorsTool implements Tool { public static final String TYPE = "SearchAnomalyDetectorsTool"; private static final String DEFAULT_DESCRIPTION = - "This is a tool that searches anomaly detectors. 
It takes 12 optional arguments named detectorName which is the explicit name of the monitor (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index or index pattern the detector is detecting over (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (synonymous with multi-entity) of non-high-cardinality (synonymous with single-entity) (default is null, indicating both), and lastUpdateTime which defines the latest update time of the anomaly detector in epoch milliseconds (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and disabled which defines whether the anomaly detector is disabled (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). The tool returns 2 values: a list of anomaly detectors (each containing the detector id, detector name, detector type indicating multi-entity or single-entity (where multi-entity also means high-cardinality), detector description, name of the configured index, last update time in epoch milliseconds), and the total number of anomaly detectors."; + "This is a tool that searches anomaly detectors. 
It takes 11 optional arguments named detectorName which is the explicit name of the detector (default is null), and detectorNamePattern which is a wildcard query to match detector name (default is null), and indices which defines the index or index pattern the detector is detecting over (default is null), and highCardinality which defines whether the anomaly detector is high cardinality (synonymous with multi-entity) or non-high-cardinality (synonymous with single-entity) (default is null, indicating both), and lastUpdateTime which defines the latest update time of the anomaly detector in epoch milliseconds (default is null), and sortOrder which defines the order of the results (options are asc or desc, and default is asc), and sortString which defines how to sort the results (default is name.keyword), and size which defines the size of the request to be returned (default is 20), and startIndex which defines the paginated index to start from (default is 0), and running which defines whether the anomaly detector is running (default is null, indicating both), and failed which defines whether the anomaly detector has failed (default is null, indicating both). 
The tool returns 2 values: a list of anomaly detectors (each containing the detector id, detector name, detector type indicating multi-entity or single-entity (where multi-entity also means high-cardinality), detector description, name of the configured index, last update time in epoch milliseconds), and the total number of anomaly detectors."; @Setter @Getter @@ -70,9 +72,9 @@ public class SearchAnomalyDetectorsTool implements Tool { @Setter private Parser outputParser; - public SearchAnomalyDetectorsTool(Client client) { + public SearchAnomalyDetectorsTool(Client client, NamedWriteableRegistry namedWriteableRegistry) { this.client = client; - this.adClient = new AnomalyDetectionNodeClient(client); + this.adClient = new AnomalyDetectionNodeClient(client, namedWriteableRegistry); // probably keep this overridden output parser. need to ensure the output matches what's expected outputParser = new Parser<>() { @@ -105,7 +107,6 @@ public void run(Map parameters, ActionListener listener) final int size = parameters.containsKey("size") ? Integer.parseInt(parameters.get("size")) : 20; final int startIndex = parameters.containsKey("startIndex") ? Integer.parseInt(parameters.get("startIndex")) : 0; final Boolean running = parameters.containsKey("running") ? Boolean.parseBoolean(parameters.get("running")) : null; - final Boolean disabled = parameters.containsKey("disabled") ? Boolean.parseBoolean(parameters.get("disabled")) : null; final Boolean failed = parameters.containsKey("failed") ? 
Boolean.parseBoolean(parameters.get("failed")) : null; List mustList = new ArrayList(); @@ -139,10 +140,16 @@ public void run(Map parameters, ActionListener listener) ActionListener searchDetectorListener = ActionListener.wrap(response -> { StringBuilder sb = new StringBuilder(); List hits = Arrays.asList(response.getHits().getHits()); - Map hitsAsMap = hits.stream().collect(Collectors.toMap(SearchHit::getId, hit -> hit)); + Map hitsAsMap = new HashMap<>(); + // We persist the hits map using detector name as the key. Note this is required to be unique from the AD plugin. + // We cannot use detector ID, because the detector in the response from the profile transport action does not include this, + // making it difficult to map potential hits that should be removed later on based on the profile response's detector state. + for (SearchHit hit : hits) { + hitsAsMap.put((String) hit.getSourceAsMap().get("name"), hit); + } // If we need to filter by detector state, make subsequent profile API calls to each detector - if (running != null || disabled != null || failed != null) { + if (running != null || failed != null) { List> profileFutures = new ArrayList<>(); for (SearchHit hit : hits) { CompletableFuture profileFuture = new CompletableFuture() @@ -183,7 +190,7 @@ public void run(Map parameters, ActionListener listener) for (GetAnomalyDetectorResponse profileResponse : profileResponses) { if (profileResponse != null && profileResponse.getDetector() != null) { - String detectorId = profileResponse.getDetector().getDetectorId(); + String responseDetectorName = profileResponse.getDetector().getName(); // We follow the existing logic as the frontend to determine overall detector state // https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/blob/main/server/routes/utils/adHelpers.ts#L437 @@ -192,9 +199,7 @@ public void run(Map parameters, ActionListener listener) if (realtimeTask != null) { String taskState = realtimeTask.getState(); - if 
(taskState.equalsIgnoreCase("CREATED")) { - detectorState = DetectorStateString.Initializing.name(); - } else if (taskState.equalsIgnoreCase("RUNNING")) { + if (taskState.equalsIgnoreCase("CREATED") || taskState.equalsIgnoreCase("RUNNING")) { detectorState = DetectorStateString.Running.name(); } else if (taskState.equalsIgnoreCase("INIT_FAILURE") || taskState.equalsIgnoreCase("UNEXPECTED_FAILURE") @@ -203,12 +208,21 @@ public void run(Map parameters, ActionListener listener) } } - if ((Boolean.FALSE.equals(running) && detectorState.equals(DetectorStateString.Running.name())) - || (Boolean.FALSE.equals(disabled) && detectorState.equals(DetectorStateString.Disabled.name())) - || (Boolean.FALSE.equals(failed) && detectorState.equals(DetectorStateString.Failed.name()))) { - hitsAsMap.remove(detectorId); + boolean includeRunning = running != null && running == true; + boolean includeFailed = failed != null && failed == true; + boolean isValid = true; + + if (detectorState.equals(DetectorStateString.Running.name())) { + isValid = (running == null || running == true) && !(includeFailed && running == null); + } else if (detectorState.equals(DetectorStateString.Failed.name())) { + isValid = (failed == null || failed == true) && !(includeRunning && failed == null); + } else if (detectorState.equals(DetectorStateString.Disabled.name())) { + isValid = (running == null || running == false) && !(includeFailed && running == null); } + if (!isValid) { + hitsAsMap.remove(responseDetectorName); + } } } } @@ -262,6 +276,8 @@ private void processHits(Map hitsAsMap, ActionListener public static class Factory implements Tool.Factory { private Client client; + private NamedWriteableRegistry namedWriteableRegistry; + private AnomalyDetectionNodeClient adClient; private static Factory INSTANCE; @@ -286,14 +302,15 @@ public static Factory getInstance() { * Initialize this factory * @param client The OpenSearch client */ - public void init(Client client) { + public void init(Client client, 
NamedWriteableRegistry namedWriteableRegistry) { this.client = client; - this.adClient = new AnomalyDetectionNodeClient(client); + this.namedWriteableRegistry = namedWriteableRegistry; + this.adClient = new AnomalyDetectionNodeClient(client, namedWriteableRegistry); } @Override public SearchAnomalyDetectorsTool create(Map map) { - return new SearchAnomalyDetectorsTool(client); + return new SearchAnomalyDetectorsTool(client, namedWriteableRegistry); } @Override diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java index 832c6297..a2973d6b 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyResultsTool.java @@ -16,6 +16,7 @@ import org.opensearch.agent.tools.utils.ToolConstants; import org.opensearch.client.Client; import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.ExistsQueryBuilder; @@ -61,9 +62,9 @@ public class SearchAnomalyResultsTool implements Tool { @Setter private Parser outputParser; - public SearchAnomalyResultsTool(Client client) { + public SearchAnomalyResultsTool(Client client, NamedWriteableRegistry namedWriteableRegistry) { this.client = client; - this.adClient = new AnomalyDetectionNodeClient(client); + this.adClient = new AnomalyDetectionNodeClient(client, namedWriteableRegistry); // probably keep this overridden output parser. 
need to ensure the output matches what's expected outputParser = new Parser<>() { @@ -190,6 +191,8 @@ private void processHits(SearchHits searchHits, ActionListener listener) public static class Factory implements Tool.Factory { private Client client; + private NamedWriteableRegistry namedWriteableRegistry; + private AnomalyDetectionNodeClient adClient; private static Factory INSTANCE; @@ -214,14 +217,15 @@ public static Factory getInstance() { * Initialize this factory * @param client The OpenSearch client */ - public void init(Client client) { + public void init(Client client, NamedWriteableRegistry namedWriteableRegistry) { this.client = client; - this.adClient = new AnomalyDetectionNodeClient(client); + this.namedWriteableRegistry = namedWriteableRegistry; + this.adClient = new AnomalyDetectionNodeClient(client, namedWriteableRegistry); } @Override public SearchAnomalyResultsTool create(Map map) { - return new SearchAnomalyResultsTool(client); + return new SearchAnomalyResultsTool(client, namedWriteableRegistry); } @Override diff --git a/src/test/java/org/opensearch/agent/TestHelpers.java b/src/test/java/org/opensearch/agent/TestHelpers.java index 847bc151..422808de 100644 --- a/src/test/java/org/opensearch/agent/TestHelpers.java +++ b/src/test/java/org/opensearch/agent/TestHelpers.java @@ -41,10 +41,10 @@ public static SearchResponse generateSearchResponse(SearchHit[] hits) { ); } - public static GetAnomalyDetectorResponse generateGetAnomalyDetectorResponses(String[] detectorIds, String[] detectorStates) { + public static GetAnomalyDetectorResponse generateGetAnomalyDetectorResponses(String[] detectorNames, String[] detectorStates) { AnomalyDetector detector = Mockito.mock(AnomalyDetector.class); - // For each subsequent call to getDetectorId(), return the next detectorId in the array - when(detector.getDetectorId()).thenReturn(detectorIds[0], Arrays.copyOfRange(detectorIds, 1, detectorIds.length)); + // For each subsequent call to getId(), return the next 
detectorId in the array + when(detector.getName()).thenReturn(detectorNames[0], Arrays.copyOfRange(detectorNames, 1, detectorNames.length)); ADTask realtimeAdTask = Mockito.mock(ADTask.class); // For each subsequent call to getState(), return the next detectorState in the array when(realtimeAdTask.getState()).thenReturn(detectorStates[0], Arrays.copyOfRange(detectorStates, 1, detectorStates.length)); diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index 13213f85..8bf4de56 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -42,11 +42,14 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.search.SearchHit; public class SearchAnomalyDetectorsToolTests { + @Mock + private NamedWriteableRegistry namedWriteableRegistry; @Mock private NodeClient nodeClient; @@ -59,7 +62,7 @@ public class SearchAnomalyDetectorsToolTests { @Before public void setup() { MockitoAnnotations.openMocks(this); - SearchAnomalyDetectorsTool.Factory.getInstance().init(nodeClient); + SearchAnomalyDetectorsTool.Factory.getInstance().init(nodeClient, namedWriteableRegistry); nullParams = null; emptyParams = Collections.emptyMap(); @@ -152,7 +155,7 @@ public void testRunWithRunningDetectorTrue() throws Exception { hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers - 
.generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + .generateGetAnomalyDetectorResponses(new String[] { detectorName }, new String[] { DetectorStateString.Running.name() }); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); @@ -177,7 +180,7 @@ public void testRunWithRunningDetectorFalse() throws Exception { hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers - .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + .generateGetAnomalyDetectorResponses(new String[] { detectorName }, new String[] { DetectorStateString.Running.name() }); String expectedResponseStr = "AnomalyDetectors=[]TotalAnomalyDetectors=0"; @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); @@ -200,7 +203,7 @@ public void testRunWithRunningDetectorUndefined() throws Exception { hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers - .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + .generateGetAnomalyDetectorResponses(new String[] { detectorName }, new String[] { DetectorStateString.Running.name() }); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); @@ -225,7 +228,7 @@ public void testRunWithNullRealtimeTask() throws Exception { hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, 
detectorId); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers - .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + .generateGetAnomalyDetectorResponses(new String[] { detectorName }, new String[] { DetectorStateString.Running.name() }); // Overriding the mocked response to realtime task and setting to null. This occurs when // a detector is created but is never started. when(getDetectorProfileResponse.getRealtimeAdTask()).thenReturn(null); @@ -233,7 +236,7 @@ public void testRunWithNullRealtimeTask() throws Exception { ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); - tool.run(Map.of("disabled", "true"), listener); + tool.run(Map.of("running", "false"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); @@ -253,14 +256,14 @@ public void testRunWithTaskStateCreated() throws Exception { hits[0] = TestHelpers.generateSearchDetectorHit(detectorName, detectorId); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers - .generateGetAnomalyDetectorResponses(new String[] { detectorId }, new String[] { DetectorStateString.Running.name() }); + .generateGetAnomalyDetectorResponses(new String[] { detectorName }, new String[] { DetectorStateString.Running.name() }); // Overriding the mocked response to set realtime task state to CREATED when(getDetectorProfileResponse.getRealtimeAdTask().getState()).thenReturn("CREATED"); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); - 
tool.run(Map.of("running", "false"), listener); + tool.run(Map.of("running", "true"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); @@ -287,7 +290,7 @@ public void testRunWithTaskStateVariousFailed() throws Exception { SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers .generateGetAnomalyDetectorResponses( - new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { detectorName1, detectorName2, detectorName3 }, new String[] { "INIT_FAILURE", "UNEXPECTED_FAILURE", "FAILED" } ); @SuppressWarnings("unchecked") @@ -325,24 +328,22 @@ public void testRunWithCombinedDetectorStatesTrue() throws Exception { SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers .generateGetAnomalyDetectorResponses( - new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { detectorName1, detectorName2, detectorName3 }, new String[] { DetectorStateString.Running.name(), DetectorStateString.Disabled.name(), DetectorStateString.Failed.name() } ); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); - tool.run(Map.of("running", "true", "disabled", "true", "failed", "true"), listener); + tool.run(Map.of("running", "true", "failed", "true"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); assertTrue(response.contains(String.format("id=%s", detectorId1))); assertTrue(response.contains(String.format("name=%s", detectorName1))); - assertTrue(response.contains(String.format("id=%s", 
detectorId2))); - assertTrue(response.contains(String.format("name=%s", detectorName2))); assertTrue(response.contains(String.format("id=%s", detectorId3))); assertTrue(response.contains(String.format("name=%s", detectorName3))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", 2))); } @Test @@ -363,17 +364,17 @@ public void testRunWithCombinedDetectorStatesFalse() throws Exception { SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers .generateGetAnomalyDetectorResponses( - new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { detectorName1, detectorName2, detectorName3 }, new String[] { DetectorStateString.Running.name(), DetectorStateString.Disabled.name(), DetectorStateString.Failed.name() } ); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); - tool.run(Map.of("running", "false", "disabled", "false", "failed", "false"), listener); + tool.run(Map.of("running", "false", "failed", "false"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); - assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=0")); + assertTrue(responseCaptor.getValue().contains("TotalAnomalyDetectors=1")); } @Test @@ -394,22 +395,20 @@ public void testRunWithCombinedDetectorStatesMixed() throws Exception { SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); GetAnomalyDetectorResponse getDetectorProfileResponse = TestHelpers .generateGetAnomalyDetectorResponses( - new String[] { detectorId1, detectorId2, detectorId3 }, + new String[] { detectorName1, detectorName2, detectorName3 }, new String[] { 
DetectorStateString.Running.name(), DetectorStateString.Disabled.name(), DetectorStateString.Failed.name() } ); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); mockProfileApiCalls(getDetectorsResponse, getDetectorProfileResponse); - tool.run(Map.of("running", "true", "disabled", "false", "failed", "true"), listener); + tool.run(Map.of("running", "true", "failed", "false"), listener); ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); assertTrue(response.contains(String.format("id=%s", detectorId1))); assertTrue(response.contains(String.format("name=%s", detectorName1))); - assertTrue(response.contains(String.format("id=%s", detectorId3))); - assertTrue(response.contains(String.format("name=%s", detectorName3))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", 1))); } @Test @@ -424,7 +423,6 @@ public void testParseParams() throws Exception { validParams.put("size", "10"); validParams.put("startIndex", "0"); validParams.put("running", "false"); - validParams.put("disabled", "false"); @SuppressWarnings("unchecked") ActionListener listener = Mockito.mock(ActionListener.class); diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java index 035b6406..ba11702f 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyResultsToolTests.java @@ -33,13 +33,11 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchResponseSections; import org.opensearch.agent.tools.utils.ToolConstants; -import org.opensearch.client.AdminClient; -import 
org.opensearch.client.ClusterAdminClient; -import org.opensearch.client.IndicesAdminClient; import org.opensearch.client.node.NodeClient; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.search.SearchHit; @@ -48,13 +46,9 @@ public class SearchAnomalyResultsToolTests { @Mock - private NodeClient nodeClient; - @Mock - private AdminClient adminClient; + private NamedWriteableRegistry namedWriteableRegistry; @Mock - private IndicesAdminClient indicesAdminClient; - @Mock - private ClusterAdminClient clusterAdminClient; + private NodeClient nodeClient; private Map nullParams; private Map emptyParams; @@ -63,7 +57,7 @@ public class SearchAnomalyResultsToolTests { @Before public void setup() { MockitoAnnotations.openMocks(this); - SearchAnomalyResultsTool.Factory.getInstance().init(nodeClient); + SearchAnomalyResultsTool.Factory.getInstance().init(nodeClient, namedWriteableRegistry); nullParams = null; emptyParams = Collections.emptyMap(); diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index bb17b30f..cb139b22 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -138,6 +138,30 @@ protected String indexDetector(String detectorAsJsonString) { return parseFieldFromResponse(response, "_id").toString(); } + protected void startDetector(String detectorId) { + Response response = makeRequest( + client(), + "POST", + "_plugins/_anomaly_detection/detectors/" + detectorId + "/_start", + null, + (String) null, + null + ); + assertEquals(RestStatus.OK, 
RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + + protected void stopDetector(String detectorId) { + Response response = makeRequest( + client(), + "POST", + "_plugins/_anomaly_detection/detectors/" + detectorId + "/_stop", + null, + (String) null, + null + ); + assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + } + protected void deleteDetector(String detectorId) { Response response = makeRequest( client(), diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java index 2390a108..7fdc593c 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java @@ -60,7 +60,7 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_withNoSystemIndex() { @SneakyThrows @Order(2) public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNameParam() { - String detectorId = ""; + String detectorId = null; try { setupTestDetectionIndex("test-index"); detectorId = ingestSampleDetector(detectorName, "test-index"); @@ -84,7 +84,7 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNameParam() { @SneakyThrows @Order(3) public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNamePatternParam() { - String detectorId = ""; + String detectorId = null; try { setupTestDetectionIndex("test-index"); detectorId = ingestSampleDetector(detectorName, "test-index"); @@ -109,7 +109,7 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNamePatternParam() @SneakyThrows @Order(4) public void testSearchAnomalyDetectorsToolInFlowAgent_indicesParam() { - String detectorId = ""; + String detectorId = null; try { setupTestDetectionIndex("test-index"); detectorId = ingestSampleDetector(detectorName, "test-index"); @@ -132,7 +132,7 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_indicesParam() { 
@SneakyThrows @Order(5) public void testSearchAnomalyDetectorsToolInFlowAgent_highCardinalityParam() { - String detectorId = ""; + String detectorId = null; try { setupTestDetectionIndex("test-index"); detectorId = ingestSampleDetector(detectorName, "test-index"); @@ -156,6 +156,81 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_highCardinalityParam() { @SneakyThrows @Order(6) + public void testSearchAnomalyDetectorsToolInFlowAgent_detectorStateParams() { + String detectorIdRunning = null; + String detectorIdDisabled1 = null; + String detectorIdDisabled2 = null; + try { + // TODO: update test scenarios + setupTestDetectionIndex("test-index"); + detectorIdRunning = ingestSampleDetector(detectorName + "-running", "test-index"); + detectorIdDisabled1 = ingestSampleDetector(detectorName + "-disabled-1", "test-index"); + detectorIdDisabled2 = ingestSampleDetector(detectorName + "-disabled-2", "test-index"); + startDetector(detectorIdRunning); + Thread.sleep(5000); + + String agentId = createAgent(registerAgentRequestBody); + String agentInput = "{\"parameters\":{\"running\": \"true\"}}"; + String result = executeAgent(agentId, agentInput); + assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result.contains(detectorIdRunning)); + + String agentInput2 = "{\"parameters\":{\"running\": \"false\"}}"; + String result2 = executeAgent(agentId, agentInput2); + assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(result2.contains(detectorIdDisabled1)); + assertTrue(result2.contains(detectorIdDisabled2)); + + String agentInput3 = "{\"parameters\":{\"failed\": \"true\"}}"; + String result3 = executeAgent(agentId, agentInput3); + assertTrue(result3.contains(String.format("TotalAnomalyDetectors=%d", 0))); + + String agentInput4 = "{\"parameters\":{\"failed\": \"false\"}}"; + String result4 = executeAgent(agentId, agentInput4); + assertTrue(result4.contains(String.format("TotalAnomalyDetectors=%d", 
3))); + assertTrue(result4.contains(detectorIdRunning)); + assertTrue(result4.contains(detectorIdDisabled1)); + assertTrue(result4.contains(detectorIdDisabled2)); + + String agentInput5 = "{\"parameters\":{\"running\": \"true\", \"failed\": \"true\"}}"; + String result5 = executeAgent(agentId, agentInput5); + assertTrue(result5.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result5.contains(detectorIdRunning)); + + String agentInput6 = "{\"parameters\":{\"running\": \"true\", \"failed\": \"false\"}}"; + String result6 = executeAgent(agentId, agentInput6); + assertTrue(result6.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result6.contains(detectorIdRunning)); + + String agentInput7 = "{\"parameters\":{\"running\": \"false\", \"failed\": \"true\"}}"; + String result7 = executeAgent(agentId, agentInput7); + assertTrue(result7.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(result7.contains(detectorIdDisabled1)); + assertTrue(result7.contains(detectorIdDisabled2)); + + String agentInput8 = "{\"parameters\":{\"running\": \"false\", \"failed\": \"false\"}}"; + String result8 = executeAgent(agentId, agentInput8); + assertTrue(result8.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(result8.contains(detectorIdDisabled1)); + assertTrue(result8.contains(detectorIdDisabled2)); + } finally { + if (detectorIdRunning != null) { + stopDetector(detectorIdRunning); + Thread.sleep(5000); + deleteDetector(detectorIdRunning); + } + if (detectorIdDisabled1 != null) { + deleteDetector(detectorIdDisabled1); + } + if (detectorIdDisabled2 != null) { + deleteDetector(detectorIdDisabled2); + } + } + + } + + @SneakyThrows + @Order(7) public void testSearchAnomalyDetectorsToolInFlowAgent_complexParams() { String detectorId = null; String detectorIdFoo = null; diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java 
b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java index d9afb684..0a9e192e 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java @@ -12,6 +12,9 @@ import org.junit.After; import org.junit.Before; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.TestMethodOrder; import org.opensearch.agent.tools.utils.ToolConstants; import com.google.gson.Gson; @@ -19,6 +22,7 @@ import lombok.SneakyThrows; +@TestMethodOrder(OrderAnnotation.class) public class SearchAnomalyResultsToolIT extends BaseAgentToolsIT { private String registerAgentRequestBody; private String resultsIndexMappings; @@ -35,6 +39,8 @@ public class SearchAnomalyResultsToolIT extends BaseAgentToolsIT { @Before @SneakyThrows public void setUp() { + deleteExternalIndices(); + deleteSystemIndices(); super.setUp(); registerAgentRequestBody = Files.readString(Path.of(this.getClass().getClassLoader().getResource(registerAgentFilepath).toURI())); resultsIndexMappings = Files @@ -57,6 +63,7 @@ public void tearDown() { } @SneakyThrows + @Order(1) public void testSearchAnomalyResultsToolInFlowAgent_withNoSystemIndex() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"detectorId\": \"" + detectorId + "\"}}"; @@ -65,6 +72,7 @@ public void testSearchAnomalyResultsToolInFlowAgent_withNoSystemIndex() { } @SneakyThrows + @Order(2) public void testSearchAnomalyResultsToolInFlowAgent_noMatching() { setupADSystemIndices(); ingestSampleResult(detectorId, 0.5, 0.5, "1"); @@ -75,6 +83,7 @@ public void testSearchAnomalyResultsToolInFlowAgent_noMatching() { } @SneakyThrows + @Order(3) public void testSearchAnomalyResultsToolInFlowAgent_matching() { setupADSystemIndices(); ingestSampleResult(detectorId, anomalyGrade, confidence, "1"); @@ -96,6 
+105,7 @@ public void testSearchAnomalyResultsToolInFlowAgent_matching() { } @SneakyThrows + @Order(4) public void testSearchAnomalyResultsToolInFlowAgent_complexParams() { setupADSystemIndices(); ingestSampleResult(detectorId, anomalyGrade, confidence, "1"); @@ -107,18 +117,7 @@ public void testSearchAnomalyResultsToolInFlowAgent_complexParams() { + "\"realTime\": true, \"anomalyGradeThreshold\": 0, \"sortOrder\": \"asc\"," + "\"sortString\": \"data_start_time\", \"size\": 10, \"startIndex\": 0 }}"; String result = executeAgent(agentId, agentInput); - assertEquals( - String - .format( - Locale.ROOT, - "AnomalyResults=[{detectorId=%s,grade=%2.1f,confidence=%2.1f}]TotalAnomalyResults=%d", - detectorId, - anomalyGrade, - confidence, - 1 - ), - result - ); + assertTrue(result.contains(String.format("TotalAnomalyResults=%d", 1))); } @SneakyThrows From 788997509c3ceba04d8a7ac06e2908c583d3d648 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 28 Feb 2024 10:47:57 -0800 Subject: [PATCH 069/119] Updated release notes file name (#241) (#242) (cherry picked from commit ad2c4f3cb7851b5c0d5b031985402e61fa019c5d) Signed-off-by: owaiskazi19 Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- ...es-2.12.0.0.md => opensearch-skills.release-notes-2.12.0.0.md} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename release-notes/{skills.release-notes-2.12.0.0.md => opensearch-skills.release-notes-2.12.0.0.md} (100%) diff --git a/release-notes/skills.release-notes-2.12.0.0.md b/release-notes/opensearch-skills.release-notes-2.12.0.0.md similarity index 100% rename from release-notes/skills.release-notes-2.12.0.0.md rename to release-notes/opensearch-skills.release-notes-2.12.0.0.md From c506f24ca60b4157d60022788c14ccec0ea77605 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 
15:24:36 +0800 Subject: [PATCH 070/119] Update dependency org.apache.commons:commons-lang3 to v3.14.0 (#47) (#244) (cherry picked from commit 46dc48bcb11d64b343949bd6a9ca127eea027aa2) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 2504c8d8..4c9dad07 100644 --- a/build.gradle +++ b/build.gradle @@ -119,7 +119,7 @@ dependencies { compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0" compileOnly group: 'org.json', name: 'json', version: '20231013' compileOnly("com.google.guava:guava:33.0.0-jre") - compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.10' + compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.14.0' compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.10.0' // Plugin dependencies From cfb419fe0c49d211eb55a99578135617ae8d805a Mon Sep 17 00:00:00 2001 From: zane-neo Date: Fri, 1 Mar 2024 15:33:05 +0800 Subject: [PATCH 071/119] Add lebeler.yml to 2.x branch (#247) Signed-off-by: zane-neo --- .github/workflows/labeler.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/labeler.yml diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 00000000..f5abac63 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,26 @@ +name: "Pull Request Labeler" +on: + pull_request_target: + branches: + - main + types: + - opened + +jobs: + label: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v2.1.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + 
installation_id: 22958780 + - name: Label + uses: actions/labeler@v5 + with: + repo-token: ${{ steps.github_app_token.outputs.token }} From df9820e923bb5a40a567a7228797ffd8c3046a23 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 15:55:23 +0800 Subject: [PATCH 072/119] Update dependency org.apache.commons:commons-text to v1.11.0 (#62) (#248) (cherry picked from commit 15c4d640e1daa0bc24eeffb64fe706527eaf8582) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 4c9dad07..266990c6 100644 --- a/build.gradle +++ b/build.gradle @@ -120,7 +120,7 @@ dependencies { compileOnly group: 'org.json', name: 'json', version: '20231013' compileOnly("com.google.guava:guava:33.0.0-jre") compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.14.0' - compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.10.0' + compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.11.0' // Plugin dependencies compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${opensearch_build}" From cfd0edef08803af6df5e46af9f69aaa4a75e385d Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 16:13:15 +0800 Subject: [PATCH 073/119] Update plugin io.freefair.lombok to v8.6 (#245) (#249) (cherry picked from commit 9186ecc6e803c6463e2e0b0db259d444c099ea4a) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- 
build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 266990c6..48466997 100644 --- a/build.gradle +++ b/build.gradle @@ -44,7 +44,7 @@ buildscript { plugins { id 'java-library' id 'com.diffplug.spotless' version '6.23.0' - id "io.freefair.lombok" version "8.4" + id "io.freefair.lombok" version "8.6" id "de.undercouch.download" version "5.3.0" } From e9b46fff8568d24a0419a250beec51623b7b742e Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 16:54:45 +0800 Subject: [PATCH 074/119] Update plugin de.undercouch.download to v5.6.0 (#239) (#250) (cherry picked from commit 28644b42c0cdbce92b5c79b277f0a572c40068a2) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 48466997..42640ee7 100644 --- a/build.gradle +++ b/build.gradle @@ -45,7 +45,7 @@ plugins { id 'java-library' id 'com.diffplug.spotless' version '6.23.0' id "io.freefair.lombok" version "8.6" - id "de.undercouch.download" version "5.3.0" + id "de.undercouch.download" version "5.6.0" } lombok { From bed81ddc2f420cb45b8d0a8ace207789d15e39ae Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 17:04:35 +0800 Subject: [PATCH 075/119] Update plugin com.diffplug.spotless to v6.25.0 (#127) (#252) (cherry picked from commit 37b68bfce8e7288fad53a932120b2adaa8342792) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 42640ee7..96d924f1 100644 --- a/build.gradle +++ b/build.gradle @@ -43,7 +43,7 @@ buildscript { plugins { id 'java-library' - id 'com.diffplug.spotless' version '6.23.0' + id 'com.diffplug.spotless' version '6.25.0' id "io.freefair.lombok" version "8.6" id "de.undercouch.download" version "5.6.0" } From 9f4e83108327359e80bb8e0ffd67d7884ead421e Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 17:06:34 +0800 Subject: [PATCH 076/119] Update dependency org.json:json to v20240205 (#246) (#251) (cherry picked from commit 7335b09feba20317caf4ab6ac53696f3dda877b5) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 96d924f1..c8a2bf78 100644 --- a/build.gradle +++ b/build.gradle @@ -117,7 +117,7 @@ dependencies { // 3P dependencies compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.10.1' compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0" - compileOnly group: 'org.json', name: 'json', version: '20231013' + compileOnly group: 'org.json', name: 'json', version: '20240205' compileOnly("com.google.guava:guava:33.0.0-jre") compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.14.0' compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.11.0' @@ -145,7 +145,7 @@ dependencies { // Test dependencies testImplementation "org.opensearch.test:framework:${opensearch_version}" testImplementation group: 'junit', name: 'junit', version: '4.13.2' - testImplementation group: 'org.json', name: 'json', version: '20231013' + testImplementation group: 'org.json', 
name: 'json', version: '20240205' testImplementation group: 'org.mockito', name: 'mockito-core', version: '5.10.0' testImplementation group: 'org.mockito', name: 'mockito-inline', version: '5.2.0' testImplementation("net.bytebuddy:byte-buddy:1.14.7") From 63f0e5c74a43f8a73e2c9a37e448b03ed873a1d7 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 12:03:29 +0800 Subject: [PATCH 077/119] Add IT for VectorDBTool and NeuralSparseTool (#177) (#255) * Add IT for VectorDBTool and NeuralSparseTool * changed to smaller models and refine IT tests * add match full prompt * fix digits rounding differences in CI --------- (cherry picked from commit cbe5fa231ecd0ba0aa7a2d7fe17e9b3e74da576a) Signed-off-by: Mingshi Liu Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/RAGTool.java | 2 +- .../integTest/BaseAgentToolsIT.java | 6 + .../org/opensearch/integTest/RAGToolIT.java | 515 ++++++++++++++++++ .../opensearch/integTest/VectorDBToolIT.java | 199 +++++++ ...l_with_neural_query_type_request_body.json | 23 + ...w_agent_of_vectordb_tool_request_body.json | 17 + ...ter_text_embedding_model_request_body.json | 14 + 7 files changed, 775 insertions(+), 1 deletion(-) create mode 100644 src/test/java/org/opensearch/integTest/RAGToolIT.java create mode 100644 src/test/java/org/opensearch/integTest/VectorDBToolIT.java create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ragtool_with_neural_query_type_request_body.json create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json create mode 100644 src/test/resources/org/opensearch/agent/tools/register_text_embedding_model_request_body.json diff --git a/src/main/java/org/opensearch/agent/tools/RAGTool.java b/src/main/java/org/opensearch/agent/tools/RAGTool.java index 6c341b05..e8159839 100644 --- 
a/src/main/java/org/opensearch/agent/tools/RAGTool.java +++ b/src/main/java/org/opensearch/agent/tools/RAGTool.java @@ -56,7 +56,7 @@ public class RAGTool implements Tool { public static final String EMBEDDING_FIELD = "embedding_field"; public static final String OUTPUT_FIELD = "output_field"; public static final String QUERY_TYPE = "query_type"; - public static final String CONTENT_GENERATION_FIELD = "enable_Content_Generation"; + public static final String CONTENT_GENERATION_FIELD = "enable_content_generation"; public static final String K_FIELD = "k"; private final AbstractRetrieverTool queryTool; private String name = TYPE; diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index cb139b22..3ee1a406 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -241,6 +241,12 @@ protected void createIndexWithConfiguration(String indexName, String indexConfig assertEquals(indexName, responseInMap.get("index").toString()); } + protected void createIngestPipelineWithConfiguration(String pipelineName, String body) throws Exception { + Response response = makeRequest(client(), "PUT", "/_ingest/pipeline/" + pipelineName, null, body, null); + Map responseInMap = parseResponseToMap(response); + assertEquals("true", responseInMap.get("acknowledged").toString()); + } + // Similar to deleteExternalIndices, but including indices with "." prefix vs. 
excluding them protected void deleteSystemIndices() throws IOException { final Response response = client().performRequest(new Request("GET", "/_cat/indices?format=json" + "&expand_wildcards=all")); diff --git a/src/test/java/org/opensearch/integTest/RAGToolIT.java b/src/test/java/org/opensearch/integTest/RAGToolIT.java new file mode 100644 index 00000000..a444e7dd --- /dev/null +++ b/src/test/java/org/opensearch/integTest/RAGToolIT.java @@ -0,0 +1,515 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.agent.tools.RAGTool; +import org.opensearch.client.ResponseException; + +import lombok.SneakyThrows; + +public class RAGToolIT extends ToolIntegrationTest { + + public static String TEST_NEURAL_INDEX_NAME = "test_neural_index"; + public static String TEST_NEURAL_SPARSE_INDEX_NAME = "test_neural_sparse_index"; + private String textEmbeddingModelId; + private String sparseEncodingModelId; + private String largeLanguageModelId; + private String registerAgentWithNeuralQueryRequestBody; + private String registerAgentWithNeuralSparseQueryRequestBody; + private String registerAgentWithNeuralQueryAndLLMRequestBody; + private String mockLLMResponseWithSource = "{\n" + + " \"inference_results\": [\n" + + " {\n" + + " \"output\": [\n" + + " {\n" + + " \"name\": \"response\",\n" + + " \"result\": \"\"\" Based on the context given:\n" + + " a, b, c are alphabets.\"\"\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + "}"; + private String mockLLMResponseWithoutSource = 
"{\n" + + " \"inference_results\": [\n" + + " {\n" + + " \"output\": [\n" + + " {\n" + + " \"name\": \"response\",\n" + + " \"result\": \"\"\" Based on the context given:\n" + + " I do not see any information about a, b, c\". So I would have to say I don't know the answer to your question based on this context..\"\"\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + "}"; + private String registerAgentWithNeuralSparseQueryAndLLMRequestBody; + + public RAGToolIT() throws IOException, URISyntaxException {} + + @SneakyThrows + private void prepareModel() { + String requestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_text_embedding_model_request_body.json") + .toURI() + ) + ); + textEmbeddingModelId = registerModelThenDeploy(requestBody); + + String requestBody1 = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_sparse_encoding_model_request_body.json") + .toURI() + ) + ); + sparseEncodingModelId = registerModelThenDeploy(requestBody1); + largeLanguageModelId = this.modelId; + } + + @SneakyThrows + private void prepareIndex() { + // prepare index for neural sparse query type + createIndexWithConfiguration( + TEST_NEURAL_SPARSE_INDEX_NAME, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"text\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"embedding\": {\n" + + " \"type\": \"rank_features\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex( + TEST_NEURAL_SPARSE_INDEX_NAME, + "0", + List.of("text", "embedding"), + List.of("hello world", Map.of("hello", 1, "world", 2)) + ); + addDocToIndex(TEST_NEURAL_SPARSE_INDEX_NAME, "1", List.of("text", "embedding"), List.of("a b", Map.of("a", 3, "b", 4))); + + // prepare index for neural query type + String pipelineConfig = "{\n" + + " \"description\": \"text embedding pipeline\",\n" + + " \"processors\": [\n" + + " {\n" + + 
" \"text_embedding\": {\n" + + " \"model_id\": \"" + + textEmbeddingModelId + + "\",\n" + + " \"field_map\": {\n" + + " \"text\": \"embedding\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + "}"; + createIngestPipelineWithConfiguration("test-embedding-model", pipelineConfig); + + String indexMapping = "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"text\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"embedding\": {\n" + + " \"type\": \"knn_vector\",\n" + + " \"dimension\": 768,\n" + + " \"method\": {\n" + + " \"name\": \"hnsw\",\n" + + " \"space_type\": \"l2\",\n" + + " \"engine\": \"lucene\",\n" + + " \"parameters\": {\n" + + " \"ef_construction\": 128,\n" + + " \"m\": 24\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"knn.space_type\": \"cosinesimil\",\n" + + " \"default_pipeline\": \"test-embedding-model\",\n" + + " \"knn\": \"true\"\n" + + " }\n" + + " }\n" + + "}"; + + createIndexWithConfiguration(TEST_NEURAL_INDEX_NAME, indexMapping); + + addDocToIndex(TEST_NEURAL_INDEX_NAME, "0", List.of("text"), List.of("hello world")); + + addDocToIndex(TEST_NEURAL_INDEX_NAME, "1", List.of("text"), List.of("a b")); + } + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + prepareModel(); + prepareIndex(); + String registerAgentWithNeuralQueryRequestBodyFile = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource( + "org/opensearch/agent/tools/register_flow_agent_of_ragtool_with_neural_query_type_request_body.json" + ) + .toURI() + ) + ); + registerAgentWithNeuralQueryRequestBody = registerAgentWithNeuralQueryRequestBodyFile + .replace("", textEmbeddingModelId) + .replace("", TEST_NEURAL_INDEX_NAME); + + registerAgentWithNeuralSparseQueryRequestBody = registerAgentWithNeuralQueryRequestBodyFile + .replace("", sparseEncodingModelId) + .replace("", TEST_NEURAL_SPARSE_INDEX_NAME) + .replace("\"query_type\": \"neural\"", 
"\"query_type\": \"neural_sparse\""); + + registerAgentWithNeuralQueryAndLLMRequestBody = registerAgentWithNeuralQueryRequestBodyFile + .replace("", textEmbeddingModelId + "\" ,\n \"inference_model_id\": \"" + largeLanguageModelId) + .replace("", TEST_NEURAL_INDEX_NAME) + .replace("false", "true"); + registerAgentWithNeuralSparseQueryAndLLMRequestBody = registerAgentWithNeuralQueryRequestBodyFile + .replace("", sparseEncodingModelId + "\" ,\n \"inference_model_id\": \"" + largeLanguageModelId) + .replace("", TEST_NEURAL_SPARSE_INDEX_NAME) + .replace("\"query_type\": \"neural\"", "\"query_type\": \"neural_sparse\"") + .replace("false", "true"); + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + deleteModel(textEmbeddingModelId); + deleteModel(sparseEncodingModelId); + } + + public void testRAGToolWithNeuralQuery() { + String agentId = createAgent(registerAgentWithNeuralQueryRequestBody); + + // neural query to test match similar text, doc1 match with higher score + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"c\"}}"); + + // To allow digits variation from model output, using string contains to match + assertTrue( + result.contains("{\"_index\":\"test_neural_index\",\"_source\":{\"text\":\"hello world\"},\"_id\":\"0\",\"_score\":0.70467") + ); + assertTrue(result.contains("{\"_index\":\"test_neural_index\",\"_source\":{\"text\":\"a b\"},\"_id\":\"1\",\"_score\":0.26499")); + + // neural query to test match exact same text case, doc0 match with higher score + String result1 = executeAgent(agentId, "{\"parameters\": {\"question\": \"hello\"}}"); + + // To allow digits variation from model output, using string contains to match + assertTrue( + result1.contains("{\"_index\":\"test_neural_index\",\"_source\":{\"text\":\"hello world\"},\"_id\":\"0\",\"_score\":0.5671488") + ); + assertTrue(result1.contains("{\"_index\":\"test_neural_index\",\"_source\":{\"text\":\"a 
b\"},\"_id\":\"1\",\"_score\":0.2423683")); + + // if blank input, call onFailure and get exception + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("[input] is null or empty, can not process it."), containsString("IllegalArgumentException")) + ); + + } + + public void testRAGToolWithNeuralQueryAndLLM() { + String agentId = createAgent(registerAgentWithNeuralQueryAndLLMRequestBody); + + // neural query to test match similar text, doc1 match with higher score + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"use RAGTool to answer a\"}}"); + assertEquals(mockLLMResponseWithSource, result); + + // if blank input, call onFailure and get exception + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("[input] is null or empty, can not process it."), containsString("IllegalArgumentException")) + ); + + } + + public void testRAGToolWithNeuralSparseQuery() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryRequestBody); + + // neural sparse query to test match extract same text, doc1 match with high score + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + assertEquals( + "The agent execute response not equal with expected.", + "{\"_index\":\"test_neural_sparse_index\",\"_source\":{\"text\":\"a b\"},\"_id\":\"1\",\"_score\":1.2068367}\n", + result + ); + + // neural sparse query to test match extract non-existed text, no match + String result2 = executeAgent(agentId, "{\"parameters\": {\"question\": \"c\"}}"); + assertEquals("The agent execute response not equal with expected.", "Can not get any match from search result.", result2); + + // if blank 
input, call onFailure and get exception + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("[input] is null or empty, can not process it."), containsString("IllegalArgumentException")) + ); + } + + public void testRAGToolWithNeuralSparseQueryAndLLM() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryAndLLMRequestBody); + + // neural sparse query to test match extract same text, doc1 match with high score + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"use RAGTool to answer a\"}}"); + assertEquals(mockLLMResponseWithSource, result); + + // if blank input, call onFailure and get exception + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("[input] is null or empty, can not process it."), containsString("IllegalArgumentException")) + ); + } + + public void testRAGToolWithNeuralSparseQuery_withIllegalSourceField_thenGetEmptySource() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryRequestBody.replace("text", "text2")); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + assertEquals( + "The agent execute response not equal with expected.", + "{\"_index\":\"test_neural_sparse_index\",\"_source\":{},\"_id\":\"1\",\"_score\":1.2068367}\n", + result + ); + } + + public void testRAGToolWithNeuralSparseQueryAndLLM_withIllegalSourceField_thenGetEmptySource() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryAndLLMRequestBody.replace("text", "text2")); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + assertEquals(mockLLMResponseWithoutSource, result); + } + + public void 
testRAGToolWithNeuralQuery_withIllegalSourceField_thenGetEmptySource() { + String agentId = createAgent(registerAgentWithNeuralQueryRequestBody.replace("text", "text2")); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + + // To allow digits variation from model output, using string contains to match + assertTrue(result.contains("{\"_index\":\"test_neural_index\",\"_source\":{},\"_id\":\"0\",\"_score\":0.70493")); + assertTrue(result.contains("{\"_index\":\"test_neural_index\",\"_source\":{},\"_id\":\"1\",\"_score\":0.26505")); + + } + + public void testRAGToolWithNeuralQueryAndLLM_withIllegalSourceField_thenGetEmptySource() { + String agentId = createAgent(registerAgentWithNeuralQueryAndLLMRequestBody.replace("text", "text2")); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + assertEquals(mockLLMResponseWithoutSource, result); + } + + public void testRAGToolWithNeuralSparseQuery_withIllegalEmbeddingField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryRequestBody.replace("\"embedding\"", "\"embedding2\"")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + ); + } + + public void testRAGToolWithNeuralSparseQueryAndLLM_withIllegalEmbeddingField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryAndLLMRequestBody.replace("\"embedding\"", "\"embedding2\"")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + ); + } + + public 
void testRAGToolWithNeuralQuery_withIllegalEmbeddingField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralQueryRequestBody.replace("\"embedding\"", "\"embedding2\"")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + ); + } + + public void testRAGToolWithNeuralQueryAndLLM_withIllegalEmbeddingField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralQueryAndLLMRequestBody.replace("\"embedding\"", "\"embedding2\"")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + ); + } + + public void testRAGToolWithNeuralSparseQuery_withIllegalIndexField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryRequestBody.replace(TEST_NEURAL_SPARSE_INDEX_NAME, "test_index2")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("no such index [test_index2]"), containsString("IndexNotFoundException")) + ); + } + + public void testRAGToolWithNeuralSparseQueryAndLLM_withIllegalIndexField_thenThrowException() { + String agentId = createAgent( + registerAgentWithNeuralSparseQueryAndLLMRequestBody.replace(TEST_NEURAL_SPARSE_INDEX_NAME, "test_index2") + ); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + 
.assertThat( + exception.getMessage(), + allOf(containsString("no such index [test_index2]"), containsString("IndexNotFoundException")) + ); + } + + public void testRAGToolWithNeuralQuery_withIllegalIndexField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralQueryRequestBody.replace(TEST_NEURAL_INDEX_NAME, "test_index2")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("no such index [test_index2]"), containsString("IndexNotFoundException")) + ); + } + + public void testRAGToolWithNeuralQueryAndLLM_withIllegalIndexField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralQueryAndLLMRequestBody.replace(TEST_NEURAL_INDEX_NAME, "test_index2")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("no such index [test_index2]"), containsString("IndexNotFoundException")) + ); + } + + public void testRAGToolWithNeuralSparseQuery_withIllegalModelIdField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryRequestBody.replace(sparseEncodingModelId, "test_model_id")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("OpenSearchStatusException"))); + } + + public void testRAGToolWithNeuralSparseQueryAndLLM_withIllegalModelIdField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralSparseQueryAndLLMRequestBody.replace(sparseEncodingModelId, "test_model_id")); + Exception exception = 
assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("OpenSearchStatusException"))); + } + + public void testRAGToolWithNeuralQuery_withIllegalModelIdField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralQueryRequestBody.replace(textEmbeddingModelId, "test_model_id")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("OpenSearchStatusException"))); + } + + public void testRAGToolWithNeuralQueryAndLLM_withIllegalModelIdField_thenThrowException() { + String agentId = createAgent(registerAgentWithNeuralQueryAndLLMRequestBody.replace(textEmbeddingModelId, "test_model_id")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("OpenSearchStatusException"))); + } + + @Override + List promptHandlers() { + PromptHandler RAGToolHandler = new PromptHandler() { + @Override + String response(String prompt) { + String expectPromptForNeuralSparseQuery = "\n" + + "\nHuman:You are a professional data analyst. You will always answer question based on the given context first. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say don't know. 
\n" + + "\n" + + " Context:\n" + + "\"_id: 1\\n_source: {\\\"text\\\":\\\"a b\\\"}\\n\"\n" + + "\n" + + "Human:use RAGTool to answer a\n" + + "\n" + + "Assistant:"; + String expectPromptForNeuralQuery = "\n" + + "\n" + + "Human:You are a professional data analyst. You will always answer question based on the given context first. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say don't know. \n" + + "\n" + + " Context:\n" + + "\"_id: 1\\n_source: {\\\"text\\\":\\\"a b\\\"}\\n_id: 0\\n_source: {\\\"text\\\":\\\"hello world\\\"}\\n\"\n" + + "\n" + + "Human:use RAGTool to answer a\n" + + "\n" + + "Assistant:"; + if (prompt.equals(expectPromptForNeuralSparseQuery) || prompt.equals(expectPromptForNeuralQuery)) { + return mockLLMResponseWithSource; + } else { + return mockLLMResponseWithoutSource; + } + } + + @Override + boolean apply(String prompt) { + return true; + } + }; + return List.of(RAGToolHandler); + } + + @Override + String toolType() { + return RAGTool.TYPE; + } +} diff --git a/src/test/java/org/opensearch/integTest/VectorDBToolIT.java b/src/test/java/org/opensearch/integTest/VectorDBToolIT.java new file mode 100644 index 00000000..668564dc --- /dev/null +++ b/src/test/java/org/opensearch/integTest/VectorDBToolIT.java @@ -0,0 +1,199 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; +import static org.junit.Assert.assertThrows; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.client.ResponseException; + +import lombok.SneakyThrows; + +public class VectorDBToolIT extends BaseAgentToolsIT { + + public static String TEST_INDEX_NAME = "test_index"; + + private String modelId; + private String 
registerAgentRequestBody; + + @SneakyThrows + private void prepareModel() { + String requestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_text_embedding_model_request_body.json") + .toURI() + ) + ); + modelId = registerModelThenDeploy(requestBody); + } + + @SneakyThrows + private void prepareIndex() { + + String pipelineConfig = "{\n" + + " \"description\": \"text embedding pipeline\",\n" + + " \"processors\": [\n" + + " {\n" + + " \"text_embedding\": {\n" + + " \"model_id\": \"" + + modelId + + "\",\n" + + " \"field_map\": {\n" + + " \"text\": \"embedding\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + "}"; + createIngestPipelineWithConfiguration("test-embedding-model", pipelineConfig); + + String indexMapping = "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"text\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"embedding\": {\n" + + " \"type\": \"knn_vector\",\n" + + " \"dimension\": 768,\n" + + " \"method\": {\n" + + " \"name\": \"hnsw\",\n" + + " \"space_type\": \"l2\",\n" + + " \"engine\": \"lucene\",\n" + + " \"parameters\": {\n" + + " \"ef_construction\": 128,\n" + + " \"m\": 24\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"knn.space_type\": \"cosinesimil\",\n" + + " \"default_pipeline\": \"test-embedding-model\",\n" + + " \"knn\": \"true\"\n" + + " }\n" + + " }\n" + + "}"; + + createIndexWithConfiguration(TEST_INDEX_NAME, indexMapping); + + addDocToIndex(TEST_INDEX_NAME, "0", List.of("text"), List.of("hello world")); + + addDocToIndex(TEST_INDEX_NAME, "1", List.of("text"), List.of("a b")); + } + + @Before + @SneakyThrows + public void setUp() { + super.setUp(); + prepareModel(); + prepareIndex(); + registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + 
.getResource("org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json") + .toURI() + ) + ); + registerAgentRequestBody = registerAgentRequestBody.replace("", modelId); + + } + + @After + @SneakyThrows + public void tearDown() { + super.tearDown(); + deleteExternalIndices(); + deleteModel(modelId); + } + + public void testVectorDBToolInFlowAgent() { + String agentId = createAgent(registerAgentRequestBody); + + // match similar text, doc1 match with higher score + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"c\"}}"); + + // To allow digits variation from model output, using string contains to match + assertTrue(result.contains("{\"_index\":\"test_index\",\"_source\":{\"text\":\"hello world\"},\"_id\":\"0\",\"_score\":0.70467")); + assertTrue(result.contains("{\"_index\":\"test_index\",\"_source\":{\"text\":\"a b\"},\"_id\":\"1\",\"_score\":0.26499")); + + // match exact same text case, doc0 match with higher score + String result1 = executeAgent(agentId, "{\"parameters\": {\"question\": \"hello\"}}"); + + // To allow digits variation from model output, using string contains to match + assertTrue( + result1.contains("{\"_index\":\"test_index\",\"_source\":{\"text\":\"hello world\"},\"_id\":\"0\",\"_score\":0.5671488") + ); + assertTrue(result1.contains("{\"_index\":\"test_index\",\"_source\":{\"text\":\"a b\"},\"_id\":\"1\",\"_score\":0.2423683")); + + // if blank input, call onFailure and get exception + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("[input] is null or empty, can not process it."), containsString("IllegalArgumentException")) + ); + } + + public void testVectorDBToolInFlowAgent_withIllegalSourceField_thenGetEmptySource() { + String agentId = createAgent(registerAgentRequestBody.replace("text", "text2")); + String result 
= executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + + // To allow digits variation from model output, using string contains to match + assertTrue(result.contains("{\"_index\":\"test_index\",\"_source\":{},\"_id\":\"0\",\"_score\":0.70493")); + assertTrue(result.contains("{\"_index\":\"test_index\",\"_source\":{},\"_id\":\"1\",\"_score\":0.26505")); + + } + + public void testVectorDBToolInFlowAgent_withIllegalEmbeddingField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody.replace("\"embedding\"", "\"embedding2\"")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + ); + } + + public void testVectorDBToolInFlowAgent_withIllegalIndexField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody.replace("test_index", "test_index2")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat( + exception.getMessage(), + allOf(containsString("no such index [test_index2]"), containsString("IndexNotFoundException")) + ); + } + + public void testVectorDBToolInFlowAgent_withIllegalModelIdField_thenThrowException() { + String agentId = createAgent(registerAgentRequestBody.replace(modelId, "test_model_id")); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}")); + + org.hamcrest.MatcherAssert + .assertThat(exception.getMessage(), allOf(containsString("Failed to find model"), containsString("OpenSearchStatusException"))); + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ragtool_with_neural_query_type_request_body.json 
b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ragtool_with_neural_query_type_request_body.json new file mode 100644 index 00000000..7f8a4af2 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_ragtool_with_neural_query_type_request_body.json @@ -0,0 +1,23 @@ +{ + "name": "Test_Agent_For_RagTool", + "type": "flow", + "description": "this is a test flow agent in flow", + "tools": [ + { + "type": "RAGTool", + "description": "A description of the tool", + "parameters": { + "embedding_model_id": "", + "index": "", + "embedding_field": "embedding", + "query_type": "neural", + "enable_content_generation":"false", + "source_field": [ + "text" + ], + "input": "${parameters.question}", + "prompt": "\n\nHuman:You are a professional data analyst. You will always answer question based on the given context first. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say don't know. 
\n\n Context:\n${parameters.output_field}\n\nHuman:${parameters.question}\n\nAssistant:" + } + } + ] +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json new file mode 100644 index 00000000..3b13e443 --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json @@ -0,0 +1,17 @@ +{ + "name": "Test_VectorDB_Agent", + "type": "flow", + "tools": [ + { + "type": "VectorDBTool", + "parameters": { + "description":"user this tool to search data from the test index", + "model_id": "", + "index": "test_index", + "embedding_field": "embedding", + "source_field": ["text"], + "input": "${parameters.question}" + } + } + ] +} \ No newline at end of file diff --git a/src/test/resources/org/opensearch/agent/tools/register_text_embedding_model_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_text_embedding_model_request_body.json new file mode 100644 index 00000000..0173665a --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_text_embedding_model_request_body.json @@ -0,0 +1,14 @@ +{ + "name": "traced_small_model", + "version": "1.0.0", + "model_format": "TORCH_SCRIPT", + "model_task_type": "text_embedding", + "model_content_hash_value": "e13b74006290a9d0f58c1376f9629d4ebc05a0f9385f40db837452b167ae9021", + "model_config": { + "model_type": "bert", + "embedding_dimension": 768, + "framework_type": "sentence_transformers", + "all_config": "{\"architectures\":[\"BertModel\"],\"max_position_embeddings\":512,\"model_type\":\"bert\",\"num_attention_heads\":12,\"num_hidden_layers\":6}" + }, + "url": "https://github.com/opensearch-project/ml-commons/blob/2.x/ml-algorithms/src/test/resources/org/opensearch/ml/engine/algorithms/text_embedding/traced_small_model.zip?raw=true" +} \ No newline at 
end of file From 5088170394d31b576f8288c8d1337544759894fe Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Sat, 16 Mar 2024 11:32:00 +0800 Subject: [PATCH 078/119] update ppl tool claude prompts to use `` tags (#260) (#261) (cherry picked from commit 7ee5eec7843d765e104060881da338a728a869b1) Signed-off-by: Joshua Li Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../resources/org/opensearch/agent/tools/PPLDefaultPrompt.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json index 885e4679..98909cb3 100644 --- a/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json +++ b/src/main/resources/org/opensearch/agent/tools/PPLDefaultPrompt.json @@ -1,5 +1,5 @@ { - "CLAUDE": "\n\nHuman:You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=`` | where `` = '``'\n\nHere are some sample questions and the PPL query to retrieve the information. 
The format for fields is\n```\n- field_name: field_type (sample field value)\n```\n\nFor example, below is a field called `timestamp`, it has a field type of `date`, and a sample value of it could look like `1686000665919`.\n```\n- timestamp: date (1686000665919)\n```\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=`accounts` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=`accounts` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=`accounts` | sort +age | head 5 | fields `firstname`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=`accounts` | fields `address`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=`accounts` | where `firstname` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=`accounts` | where `firstname` = 'Hattie' OR `lastname` = 'frank' | fields `email`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=`accounts` | where `firstname` != 'Hattie' AND `lastname` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=`accounts` | where QUERY_STRING(['email'], '.com') | fields `email`\n\nQuestion: Find the documents in index 'accounts' where there is an email\nPPL: source=`accounts` | where ISNOTNULL(`email`)\n\nQuestion: Count the number of documents 
in index 'accounts'\nPPL: source=`accounts` | stats COUNT() AS `count`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=`accounts` | where `firstname` ='Amber' | stats COUNT() AS `count`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=`accounts` | where `age` > 33 | stats COUNT() AS `count`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=`accounts` | stats DISTINCT_COUNT(age) AS `distinct_count`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=`accounts` | stats COUNT() AS `count` BY `gender`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=`accounts` | stats AVG(`age`) AS `avg_age`, MIN(`age`) AS `min_age`, MAX(`age`) AS `max_age`\n\nQuestion: Show all states sorted by average balance. index is 'accounts'\nPPL: source=`accounts` | stats AVG(`balance`) AS `avg_balance` BY `state` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: 
half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'clothing') AND `order_date` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(`taxful_total_price`) AS `avg_price`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=`ecommerce` | where `order_date` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(`taxful_total_price`) AS `avg_price` by SPAN(`order_date`, 2h) AS `span`, `geoip.city_name`\n\nQuestion: What is the total revenue of shoes each day in this week? 
index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'shoes') AND `order_date` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(`taxful_total_price`) AS `revenue` by SPAN(`order_date`, 1d) AS `span`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text ('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND `observerTime` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 Feburary? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '!200') AND `observerTime` >= '2023-03-01 00:00:00' AND `observerTime` < '2023-04-01 00:00:00' | stats COUNT() AS `count`\n\nQuestion: Count the number of business days that have web category logs last week? 
index is 'events'\nPPL: source=`events` | where `category` = 'web' AND `observerTime` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(`observerTime`) >= 2 AND DAY_OF_WEEK(`observerTime`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(`observerTime`, 'yyyy-MM-dd')) AS `distinct_count`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=`events` | stats SUM(`http.response.bytes`) AS `sum_bytes` by `trace_id` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=`events` | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a `text` or `keyword` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. 
Similarly, if you need a date field, look for a relevant field name with type `date` and not `long`.\n#02 You must pick a field with `date` type when filtering on date/time.\n#03 You must pick a field with `date` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of `log`, `body`, `message`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where `timestamp` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where `timestamp` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where `timestamp` < '2023-01-01 00:00:00''. Do not use `DATE_FORMAT()`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(``, )' must have type `date`, not `long`.\n#05 When aggregating by `SPAN` and another field, put `SPAN` after `by` and before the other field, eg. 'stats COUNT() AS `count` by SPAN(`timestamp`, 1d) AS `span`, `category`'.\n#06 You must put values in quotes when filtering fields with `text` or `keyword` field type.\n#07 To find documents that contain certain phrases in string fields, use `QUERY_STRING` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. `integer`), then use 'where `status_code` >= 400'; if the field is a string (eg. 
`text` or `keyword`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPlease only contain PPL inside your response.\n----------------\nQuestion : ${indexInfo.question}? index is `${indexInfo.indexName}`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", + "CLAUDE": "\n\nHuman:You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=`` | where `` = '``'\n\nHere are some sample questions and the PPL query to retrieve the information. The format for fields is\n```\n- field_name: field_type (sample field value)\n```\n\nFor example, below is a field called `timestamp`, it has a field type of `date`, and a sample value of it could look like `1686000665919`.\n```\n- timestamp: date (1686000665919)\n```\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=`accounts` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=`accounts` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=`accounts` | sort +age | head 5 | fields `firstname`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=`accounts` | fields `address`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=`accounts` | where `firstname` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: 
source=`accounts` | where `firstname` = 'Hattie' OR `lastname` = 'frank' | fields `email`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=`accounts` | where `firstname` != 'Hattie' AND `lastname` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=`accounts` | where QUERY_STRING(['email'], '.com') | fields `email`\n\nQuestion: Find the documents in index 'accounts' where there is an email\nPPL: source=`accounts` | where ISNOTNULL(`email`)\n\nQuestion: Count the number of documents in index 'accounts'\nPPL: source=`accounts` | stats COUNT() AS `count`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=`accounts` | where `firstname` ='Amber' | stats COUNT() AS `count`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=`accounts` | where `age` > 33 | stats COUNT() AS `count`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=`accounts` | stats DISTINCT_COUNT(age) AS `distinct_count`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=`accounts` | stats COUNT() AS `count` BY `gender`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=`accounts` | stats AVG(`age`) AS `avg_age`, MIN(`age`) AS `min_age`, MAX(`age`) AS `max_age`\n\nQuestion: Show all states sorted by average balance. 
index is 'accounts'\nPPL: source=`accounts` | stats AVG(`balance`) AS `avg_balance` BY `state` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- total_unique_products: 
integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'clothing') AND `order_date` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(`taxful_total_price`) AS `avg_price`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=`ecommerce` | where `order_date` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(`taxful_total_price`) AS `avg_price` by SPAN(`order_date`, 2h) AS `span`, `geoip.city_name`\n\nQuestion: What is the total revenue of shoes each day in this week? index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'shoes') AND `order_date` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(`taxful_total_price`) AS `revenue` by SPAN(`order_date`, 1d) AS `span`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text ('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- 
span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND `observerTime` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 Feburary? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '!200') AND `observerTime` >= '2023-03-01 00:00:00' AND `observerTime` < '2023-04-01 00:00:00' | stats COUNT() AS `count`\n\nQuestion: Count the number of business days that have web category logs last week? index is 'events'\nPPL: source=`events` | where `category` = 'web' AND `observerTime` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(`observerTime`) >= 2 AND DAY_OF_WEEK(`observerTime`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(`observerTime`, 'yyyy-MM-dd')) AS `distinct_count`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=`events` | stats SUM(`http.response.bytes`) AS `sum_bytes` by `trace_id` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=`events` | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. 
For example, if you need to filter flights departed from 'JFK', look for a `text` or `keyword` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. Similarly, if you need a date field, look for a relevant field name with type `date` and not `long`.\n#02 You must pick a field with `date` type when filtering on date/time.\n#03 You must pick a field with `date` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of `log`, `body`, `message`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where `timestamp` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where `timestamp` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where `timestamp` < '2023-01-01 00:00:00''. Do not use `DATE_FORMAT()`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(``, )' must have type `date`, not `long`.\n#05 When aggregating by `SPAN` and another field, put `SPAN` after `by` and before the other field, eg. 'stats COUNT() AS `count` by SPAN(`timestamp`, 1d) AS `span`, `category`'.\n#06 You must put values in quotes when filtering fields with `text` or `keyword` field type.\n#07 To find documents that contain certain phrases in string fields, use `QUERY_STRING` which supports multiple fields and wildcard, eg. 
'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. `integer`), then use 'where `status_code` >= 400'; if the field is a string (eg. `text` or `keyword`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nPut your PPL query in tags.\n----------------\nQuestion : ${indexInfo.question}? index is `${indexInfo.indexName}`\nFields:\n${indexInfo.mappingInfo}\n\nAssistant:", "FINETUNE": "Below is an instruction that describes a task, paired with the index and corresponding fields that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nI have an opensearch index with fields in the following. Now I have a question: ${indexInfo.question}. Can you help me generate a PPL for that?\n\n### Index:\n${indexInfo.indexName}\n\n### Fields:\n${indexInfo.mappingInfo}\n\n### Response:\n", "OPENAI": "You will be given a question about some metrics from a user.\nUse context provided to write a PPL query that can be used to retrieve the information.\n\nHere is a sample PPL query:\nsource=`` | where `` = '``'\n\nHere are some sample questions and the PPL query to retrieve the information. 
The format for fields is\n```\n- field_name: field_type (sample field value)\n```\n\nFor example, below is a field called `timestamp`, it has a field type of `date`, and a sample value of it could look like `1686000665919`.\n```\n- timestamp: date (1686000665919)\n```\n----------------\n\nThe following text contains fields and questions/answers for the 'accounts' index\n\nFields:\n- account_number: long (101)\n- address: text ('880 Holmes Lane')\n- age: long (32)\n- balance: long (39225)\n- city: text ('Brogan')\n- email: text ('amberduke@pyrami.com')\n- employer: text ('Pyrami')\n- firstname: text ('Amber')\n- gender: text ('M')\n- lastname: text ('Duke')\n- state: text ('IL')\n- registered_at: date (1686000665919)\n\nQuestion: Give me some documents in index 'accounts'\nPPL: source=`accounts` | head\n\nQuestion: Give me 5 oldest people in index 'accounts'\nPPL: source=`accounts` | sort -age | head 5\n\nQuestion: Give me first names of 5 youngest people in index 'accounts'\nPPL: source=`accounts` | sort +age | head 5 | fields `firstname`\n\nQuestion: Give me some addresses in index 'accounts'\nPPL: source=`accounts` | fields `address`\n\nQuestion: Find the documents in index 'accounts' where firstname is 'Hattie'\nPPL: source=`accounts` | where `firstname` = 'Hattie'\n\nQuestion: Find the emails where firstname is 'Hattie' or lastname is 'Frank' in index 'accounts'\nPPL: source=`accounts` | where `firstname` = 'Hattie' OR `lastname` = 'frank' | fields `email`\n\nQuestion: Find the documents in index 'accounts' where firstname is not 'Hattie' and lastname is not 'Frank'\nPPL: source=`accounts` | where `firstname` != 'Hattie' AND `lastname` != 'frank'\n\nQuestion: Find the emails that contain '.com' in index 'accounts'\nPPL: source=`accounts` | where QUERY_STRING(['email'], '.com') | fields `email`\n\nQuestion: Find the documents in index 'accounts' where there is an email\nPPL: source=`accounts` | where ISNOTNULL(`email`)\n\nQuestion: Count the number of documents 
in index 'accounts'\nPPL: source=`accounts` | stats COUNT() AS `count`\n\nQuestion: Count the number of people with firstnaQuestion: Count the number of people withe=`accounts` | where `firstname` ='Amber' | stats COUNT() AS `count`\n\nQuestion: How many people are older than 33? index is 'accounts'\nPPL: source=`accounts` | where `age` > 33 | stats COUNT() AS `count`\n\nQuestion: How many distinct ages? index is 'accounts'\nPPL: source=`accounts` | stats DISTINCT_COUNT(age) AS `distinct_count`\n\nQuestion: How many males and females in index 'accounts'?\nPPL: source=`accounts` | stats COUNT() AS `count` BY `gender`\n\nQuestion: What is the average, minimum, maximum age in 'accounts' index?\nPPL: source=`accounts` | stats AVG(`age`) AS `avg_age`, MIN(`age`) AS `min_age`, MAX(`age`) AS `max_age`\n\nQuestion: Show all states sorted by average balance. index is 'accounts'\nPPL: source=`accounts` | stats AVG(`balance`) AS `avg_balance` BY `state` | sort +avg_balance\n\n----------------\n\nThe following text contains fields and questions/answers for the 'ecommerce' index\n\nFields:\n- category: text ('Men's Clothing')\n- currency: keyword ('EUR')\n- customer_birth_date: date (null)\n- customer_first_name: text ('Eddie')\n- customer_full_name: text ('Eddie Underwood')\n- customer_gender: keyword ('MALE')\n- customer_id: keyword ('38')\n- customer_last_name: text ('Underwood')\n- customer_phone: keyword ('')\n- day_of_week: keyword ('Monday')\n- day_of_week_i: integer (0)\n- email: keyword ('eddie@underwood-family.zzz')\n- event.dataset: keyword ('sample_ecommerce')\n- geoip.city_name: keyword ('Cairo')\n- geoip.continent_name: keyword ('Africa')\n- geoip.country_iso_code: keyword ('EG')\n- geoip.location: geo_point ([object Object])\n- geoip.region_name: keyword ('Cairo Governorate')\n- manufacturer: text ('Elitelligence,Oceanavigations')\n- order_date: date (2023-06-05T09:28:48+00:00)\n- order_id: keyword ('584677')\n- products._id: text (null)\n- products.base_price: 
half_float (null)\n- products.base_unit_price: half_float (null)\n- products.category: text (null)\n- products.created_on: date (null)\n- products.discount_amount: half_float (null)\n- products.discount_percentage: half_float (null)\n- products.manufacturer: text (null)\n- products.min_price: half_float (null)\n- products.price: half_float (null)\n- products.product_id: long (null)\n- products.product_name: text (null)\n- products.quantity: integer (null)\n- products.sku: keyword (null)\n- products.tax_amount: half_float (null)\n- products.taxful_price: half_float (null)\n- products.taxless_price: half_float (null)\n- products.unit_discount_amount: half_float (null)\n- sku: keyword ('ZO0549605496,ZO0299602996')\n- taxful_total_price: half_float (36.98)\n- taxless_total_price: half_float (36.98)\n- total_quantity: integer (2)\n- total_unique_products: integer (2)\n- type: keyword ('order')\n- user: keyword ('eddie')\n\nQuestion: What is the average price of products in clothing category ordered in the last 7 days? index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'clothing') AND `order_date` > DATE_SUB(NOW(), INTERVAL 7 DAY) | stats AVG(`taxful_total_price`) AS `avg_price`\n\nQuestion: What is the average price of products in each city ordered today by every 2 hours? index is 'ecommerce'\nPPL: source=`ecommerce` | where `order_date` > DATE_SUB(NOW(), INTERVAL 24 HOUR) | stats AVG(`taxful_total_price`) AS `avg_price` by SPAN(`order_date`, 2h) AS `span`, `geoip.city_name`\n\nQuestion: What is the total revenue of shoes each day in this week? 
index is 'ecommerce'\nPPL: source=`ecommerce` | where QUERY_STRING(['category'], 'shoes') AND `order_date` > DATE_SUB(NOW(), INTERVAL 1 WEEK) | stats SUM(`taxful_total_price`) AS `revenue` by SPAN(`order_date`, 1d) AS `span`\n\n----------------\n\nThe following text contains fields and questions/answers for the 'events' index\nFields:\n- timestamp: long (1686000665919)\n- attributes.data_stream.dataset: text ('nginx.access')\n- attributes.data_stream.namespace: text ('production')\n- attributes.data_stream.type: text ('logs')\n- body: text ('172.24.0.1 - - [02/Jun/2023:23:09:27 +0000] 'GET / HTTP/1.1' 200 4955 '-' 'Mozilla/5.0 zgrab/0.x'')\n- communication.source.address: text ('127.0.0.1')\n- communication.source.ip: text ('172.24.0.1')\n- container_id: text (null)\n- container_name: text (null)\n- event.category: text ('web')\n- event.domain: text ('nginx.access')\n- event.kind: text ('event')\n- event.name: text ('access')\n- event.result: text ('success')\n- event.type: text ('access')\n- http.flavor: text ('1.1')\n- http.request.method: text ('GET')\n- http.response.bytes: long (4955)\n- http.response.status_code: keyword ('200')\n- http.url: text ('/')\n- log: text (null)\n- observerTime: date (1686000665919)\n- source: text (null)\n- span_id: text ('abcdef1010')\n- trace_id: text ('102981ABCD2901')\n\nQuestion: What are recent logs with errors and contains word 'test'? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') AND QUERY_STRING(['body'], 'test') AND `observerTime` > DATE_SUB(NOW(), INTERVAL 5 MINUTE)\n\nQuestion: What is the total number of log with a status code other than 200 in 2023 Feburary? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '!200') AND `observerTime` >= '2023-03-01 00:00:00' AND `observerTime` < '2023-04-01 00:00:00' | stats COUNT() AS `count`\n\nQuestion: Count the number of business days that have web category logs last week? 
index is 'events'\nPPL: source=`events` | where `category` = 'web' AND `observerTime` > DATE_SUB(NOW(), INTERVAL 1 WEEK) AND DAY_OF_WEEK(`observerTime`) >= 2 AND DAY_OF_WEEK(`observerTime`) <= 6 | stats DISTINCT_COUNT(DATE_FORMAT(`observerTime`, 'yyyy-MM-dd')) AS `distinct_count`\n\nQuestion: What are the top traces with largest bytes? index is 'events'\nPPL: source=`events` | stats SUM(`http.response.bytes`) AS `sum_bytes` by `trace_id` | sort -sum_bytes | head\n\nQuestion: Give me log patterns? index is 'events'\nPPL: source=`events` | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\nQuestion: Give me log patterns for logs with errors? index is 'events'\nPPL: source=`events` | where QUERY_STRING(['http.response.status_code'], '4* OR 5*') | patterns `body` | stats take(`body`, 1) AS `sample_pattern` by `patterns_field` | fields `sample_pattern`\n\n----------------\n\nUse the following steps to generate the PPL query:\n\nStep 1. Find all field entities in the question.\n\nStep 2. Pick the fields that are relevant to the question from the provided fields list using entities. Rules:\n#01 Consider the field name, the field type, and the sample value when picking relevant fields. For example, if you need to filter flights departed from 'JFK', look for a `text` or `keyword` field with a field name such as 'departedAirport', and the sample value should be a 3 letter IATA airport code. 
Similarly, if you need a date field, look for a relevant field name with type `date` and not `long`.\n#02 You must pick a field with `date` type when filtering on date/time.\n#03 You must pick a field with `date` type when aggregating by time interval.\n#04 You must not use the sample value in PPL query, unless it is relevant to the question.\n#05 You must only pick fields that are relevant, and must pick the whole field name from the fields list.\n#06 You must not use fields that are not in the fields list.\n#07 You must not use the sample values unless relevant to the question.\n#08 You must pick the field that contains a log line when asked about log patterns. Usually it is one of `log`, `body`, `message`.\n\nStep 3. Use the choosen fields to write the PPL query. Rules:\n#01 Always use comparisons to filter date/time, eg. 'where `timestamp` > DATE_SUB(NOW(), INTERVAL 1 DAY)'; or by absolute time: 'where `timestamp` > 'yyyy-MM-dd HH:mm:ss'', eg. 'where `timestamp` < '2023-01-01 00:00:00''. Do not use `DATE_FORMAT()`.\n#02 Only use PPL syntax and keywords appeared in the question or in the examples.\n#03 If user asks for current or recent status, filter the time field for last 5 minutes.\n#04 The field used in 'SPAN(``, )' must have type `date`, not `long`.\n#05 When aggregating by `SPAN` and another field, put `SPAN` after `by` and before the other field, eg. 'stats COUNT() AS `count` by SPAN(`timestamp`, 1d) AS `span`, `category`'.\n#06 You must put values in quotes when filtering fields with `text` or `keyword` field type.\n#07 To find documents that contain certain phrases in string fields, use `QUERY_STRING` which supports multiple fields and wildcard, eg. 'where QUERY_STRING(['field1', 'field2'], 'prefix*')'.\n#08 To find 4xx and 5xx errors using status code, if the status code field type is numberic (eg. `integer`), then use 'where `status_code` >= 400'; if the field is a string (eg. 
`text` or `keyword`), then use 'where QUERY_STRING(['status_code'], '4* OR 5*')'.\n\n----------------\nOutput format: use xml tags to surround your PPL query, eg. source=index.\n----------------\nQuestion : ${indexInfo.question}? index is `${indexInfo.indexName}`\nFields:\n${indexInfo.mappingInfo}" } From 64c5e2e96bc00dc4217e9235c9272d78bc024593 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 16:32:14 +0800 Subject: [PATCH 079/119] add parameter validate for PPL tool (#259) (#262) * add parameter validate for PPL tool * apply spotless * simplify code * prevent NPE * change logic * apply spot * fix UT * apply spotless --------- (cherry picked from commit 571dab7d671036b06e859075c593033a20fe22cb) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../org/opensearch/agent/tools/PPLTool.java | 24 +++++++++++ .../opensearch/agent/tools/PPLToolTests.java | 42 ++++++++++++++++++- 2 files changed, 64 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index c70bf030..b2c162a2 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -18,12 +18,14 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.StringJoiner; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.math.NumberUtils; import org.apache.commons.text.StringSubstitutor; import org.json.JSONObject; import org.opensearch.action.ActionRequest; @@ -312,6 +314,7 @@ public void init(Client client) { @Override public PPLTool create(Map map) { + validatePPLToolParameters(map); return new PPLTool( client, (String) map.get("model_id"), 
@@ -355,6 +358,27 @@ private GetMappingsRequest buildGetMappingRequest(String indexName) { return getMappingsRequest; } + private static void validatePPLToolParameters(Map map) { + if (StringUtils.isBlank((String) map.get("model_id"))) { + throw new IllegalArgumentException("PPL tool needs non blank model id."); + } + if (map.containsKey("execute") && Objects.nonNull(map.get("execute"))) { + String execute = map.get("execute").toString().toLowerCase(Locale.ROOT); + if (!execute.equals("true") && !execute.equals("false")) { + throw new IllegalArgumentException("PPL tool parameter execute must be false or true"); + } + + } + if (map.containsKey("head")) { + String head = map.get("head").toString(); + try { + int headInt = NumberUtils.createInteger(head); + } catch (Exception e) { + throw new IllegalArgumentException("PPL tool parameter head must be integer."); + } + } + } + private String constructTableInfo(SearchHit[] searchHits, Map mappings) throws PrivilegedActionException { String firstIndexName = (String) mappings.keySet().toArray()[0]; MappingMetadata mappingMetadata = mappings.get(firstIndexName); diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index 4c73817b..25fe62a9 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -125,9 +125,47 @@ public void setup() { PPLTool.Factory.getInstance().init(client); } + @Test + public void testTool_WithoutModelId() { + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> PPLTool.Factory.getInstance().create(ImmutableMap.of("prompt", "contextPrompt")) + ); + assertEquals("PPL tool needs non blank model id.", exception.getMessage()); + } + + @Test + public void testTool_WithBlankModelId() { + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> 
PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", " ")) + ); + assertEquals("PPL tool needs non blank model id.", exception.getMessage()); + } + + @Test + public void testTool_WithNonIntegerHead() { + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "demo", "head", "11.5")) + ); + assertEquals("PPL tool parameter head must be integer.", exception.getMessage()); + } + + @Test + public void testTool_WithNonBooleanExecute() { + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "demo", "execute", "hello")) + ); + assertEquals("PPL tool parameter execute must be false or true", exception.getMessage()); + } + @Test public void testTool() { - PPLTool tool = PPLTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt")); + PPLTool tool = PPLTool.Factory + .getInstance() + .create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "head", "100")); assertEquals(PPLTool.TYPE, tool.getName()); tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { @@ -142,7 +180,7 @@ public void testTool() { public void testTool_withPreviousInput() { PPLTool tool = PPLTool.Factory .getInstance() - .create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "previous_tool_name", "previousTool")); + .create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "previous_tool_name", "previousTool", "head", "-5")); assertEquals(PPLTool.TYPE, tool.getName()); tool.run(ImmutableMap.of("previousTool.output", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { From 357be376f4a072ee506f5dbde3895359bb6c2744 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Thu, 21 Mar 2024 08:59:36 +0800 Subject: [PATCH 080/119] change release date (#267) Signed-off-by: 
zane-neo --- .../opensearch-skills.release-notes-2.13.0.0.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 release-notes/opensearch-skills.release-notes-2.13.0.0.md diff --git a/release-notes/opensearch-skills.release-notes-2.13.0.0.md b/release-notes/opensearch-skills.release-notes-2.13.0.0.md new file mode 100644 index 00000000..9c6937e0 --- /dev/null +++ b/release-notes/opensearch-skills.release-notes-2.13.0.0.md @@ -0,0 +1,9 @@ +# 2024-03-20 Version 2.13.0.0 + +Compatible with OpenSearch 2.13.0 + +### Features +* Fix SearchAnomalyDetectorsTool indices param bug +* Fix detector state params in SearchAnomalyDetectorsTool +* Update ppl tool claude model prompts to use tags +* Add parameter validation for PPL tool From 9f1bd35857e109644088211e9305e16fdcb7dae1 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 11:29:35 +0800 Subject: [PATCH 081/119] update 2.13 release note add dependency (#269) (#270) * update 2.13 release note * update 2.13 release note * Delete empty file --------- (cherry picked from commit 02a32098d348ed7e2b54cf0e74e77e50281215d2) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../opensearch-skills.release-notes-2.13.0.0.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/release-notes/opensearch-skills.release-notes-2.13.0.0.md b/release-notes/opensearch-skills.release-notes-2.13.0.0.md index 9c6937e0..04a0b668 100644 --- a/release-notes/opensearch-skills.release-notes-2.13.0.0.md +++ b/release-notes/opensearch-skills.release-notes-2.13.0.0.md @@ -7,3 +7,12 @@ Compatible with OpenSearch 2.13.0 * Fix detector state params in SearchAnomalyDetectorsTool * Update ppl tool claude model prompts to use tags * Add parameter validation for PPL tool + +### Dependencies +* Update mockito monorepo to v5.10.0 (#128) (#197) +* Update dependency org.apache.commons:commons-lang3 to 
v3.14.0 (#47) +* Update dependency org.apache.commons:commons-text to v1.11.0 (#62) +* Update plugin io.freefair.lombok to v8.6 (#245) (#249) +* Update plugin de.undercouch.download to v5.6.0 (#239) (#250) +* Update plugin com.diffplug.spotless to v6.25.0 (#127) (#252) +* Update dependency org.json:json to v20240205 (#246) (#251) From 76dcbcb9409797cb51dddd3061ff664f56c89d2e Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 18 Apr 2024 14:15:11 +0800 Subject: [PATCH 082/119] fix filter fields, adding geo point and date_nanos (#285) (#286) * fix filter fields, adding geo point and date_nanos * apply spotless --------- (cherry picked from commit b55adde7dcef331cd7cee40d461b52788faa0968) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index b2c162a2..2108aa37 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -107,6 +107,7 @@ public class PPLTool implements Tool { static { ALLOWED_FIELDS_TYPE = new HashSet<>(); // from // https://github.com/opensearch-project/sql/blob/2.x/docs/user/ppl/general/datatypes.rst#data-types-mapping + // and https://opensearch.org/docs/latest/field-types/supported-field-types/index/ ALLOWED_FIELDS_TYPE.add("boolean"); ALLOWED_FIELDS_TYPE.add("byte"); ALLOWED_FIELDS_TYPE.add("short"); @@ -119,10 +120,12 @@ public class PPLTool implements Tool { ALLOWED_FIELDS_TYPE.add("keyword"); ALLOWED_FIELDS_TYPE.add("text"); ALLOWED_FIELDS_TYPE.add("date"); + ALLOWED_FIELDS_TYPE.add("date_nanos"); ALLOWED_FIELDS_TYPE.add("ip"); ALLOWED_FIELDS_TYPE.add("binary"); ALLOWED_FIELDS_TYPE.add("object"); ALLOWED_FIELDS_TYPE.add("nested"); 
+ ALLOWED_FIELDS_TYPE.add("geo_point"); try { DEFAULT_PROMPT_DICT = loadDefaultPromptDict(); From de2e2d7f1c1fcb47a496f4805537d1bd37eead02 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Thu, 18 Apr 2024 15:38:58 +0800 Subject: [PATCH 083/119] Increment version to 2.13; change ad plugin jar dependency; increment byte-buddy version (#288) * Increament version to 2.13; change ad plugin jar dependency; increment byte-buddy version Signed-off-by: zane-neo * fix AD tests compilation failure issue Signed-off-by: zane-neo --------- Signed-off-by: zane-neo --- build.gradle | 15 +++++++-------- .../tools/SearchAnomalyDetectorsToolTests.java | 7 ++++--- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/build.gradle b/build.gradle index c8a2bf78..6d8a2e76 100644 --- a/build.gradle +++ b/build.gradle @@ -5,14 +5,13 @@ import org.opensearch.gradle.test.RestIntegTestTask import java.util.concurrent.Callable -import org.opensearch.gradle.testclusters.OpenSearchCluster import java.nio.file.Paths import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.12.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.13.0-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") version_tokens = opensearch_version.tokenize('-') @@ -54,9 +53,9 @@ lombok { repositories { mavenLocal() - mavenCentral() - maven { url "https://plugins.gradle.org/m2/" } maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } + maven { url "https://plugins.gradle.org/m2/" } + mavenCentral() } allprojects { @@ -108,7 +107,7 @@ task addJarsToClasspath(type: Copy) { into("$buildDir/classes") from(fileTree(dir: adJarDirectory)) { - include "opensearch-anomaly-detection-${opensearch_build}.jar" + include 
"opensearch-time-series-analytics-${opensearch_build}.jar" } into("$buildDir/classes") } @@ -125,7 +124,7 @@ dependencies { // Plugin dependencies compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${opensearch_build}" implementation fileTree(dir: jsJarDirectory, include: ["opensearch-job-scheduler-${opensearch_build}.jar"]) - implementation fileTree(dir: adJarDirectory, include: ["opensearch-anomaly-detection-${opensearch_build}.jar"]) + implementation fileTree(dir: adJarDirectory, include: ["opensearch-time-series-analytics-${opensearch_build}.jar"]) implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${opensearch_build}.jar", "ppl-${opensearch_build}.jar", "protocol-${opensearch_build}.jar"]) compileOnly "org.opensearch:common-utils:${opensearch_build}" compileOnly "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" @@ -148,7 +147,7 @@ dependencies { testImplementation group: 'org.json', name: 'json', version: '20240205' testImplementation group: 'org.mockito', name: 'mockito-core', version: '5.10.0' testImplementation group: 'org.mockito', name: 'mockito-inline', version: '5.2.0' - testImplementation("net.bytebuddy:byte-buddy:1.14.7") + testImplementation("net.bytebuddy:byte-buddy:1.14.9") testImplementation("net.bytebuddy:byte-buddy-agent:1.14.7") testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.1' testImplementation 'org.mockito:mockito-junit-jupiter:5.10.0' @@ -569,4 +568,4 @@ task updateVersion { // Include the required files that needs to be updated with new Version ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) } -} \ No newline at end of file +} diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index 8bf4de56..8a316930 100644 --- 
a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -32,7 +32,6 @@ import org.opensearch.action.ActionType; import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.IntervalTimeConfiguration; import org.opensearch.ad.transport.GetAnomalyDetectorAction; import org.opensearch.ad.transport.GetAnomalyDetectorResponse; import org.opensearch.ad.transport.SearchAnomalyDetectorAction; @@ -46,6 +45,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.search.SearchHit; +import org.opensearch.timeseries.model.IntervalTimeConfiguration; public class SearchAnomalyDetectorsToolTests { @Mock @@ -85,6 +85,7 @@ public void setup() { Instant.now(), Collections.emptyList(), null, + null, null ); } @@ -125,7 +126,7 @@ public void testRunWithSingleAnomalyDetector() throws Exception { content.field("last_update_time", testDetector.getLastUpdateTime().toEpochMilli()); content.endObject(); SearchHit[] hits = new SearchHit[1]; - hits[0] = new SearchHit(0, testDetector.getDetectorId(), null, null).sourceRef(BytesReference.bytes(content)); + hits[0] = new SearchHit(0, testDetector.getId(), null, null).sourceRef(BytesReference.bytes(content)); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); String expectedResponseStr = getExpectedResponseString(testDetector); @@ -460,7 +461,7 @@ private String getExpectedResponseString(AnomalyDetector testDetector) { return String .format( "AnomalyDetectors=[{id=%s,name=%s,type=%s,description=%s,index=%s,lastUpdateTime=%d}]TotalAnomalyDetectors=%d", - testDetector.getDetectorId(), + testDetector.getId(), testDetector.getName(), testDetector.getDetectorType(), testDetector.getDescription(), From eeb0d89f0bddcb9959934491858c600d257460d9 Mon Sep 17 00:00:00 2001 From: 
"opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 18 Apr 2024 16:58:37 +0800 Subject: [PATCH 084/119] remove logic about replace quota for finetuning model (#289) (#291) * remove logic about replace quota for finetuning model * change logic to omit replace on finetune * change equal logic * fix logic error --------- (cherry picked from commit 44b7adcfeae1981ffb0c8bccd589ef8937e1435d) Signed-off-by: xinyual Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 2108aa37..b1154b1c 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -524,7 +524,9 @@ private String parseOutput(String llmOutput, String indexName) { throw new IllegalArgumentException("The returned PPL: " + llmOutput + " has wrong format"); } } - ppl = ppl.replace("`", ""); + if (this.pplModelType != PPLModelType.FINETUNE) { + ppl = ppl.replace("`", ""); + } ppl = ppl.replaceAll("\\bSPAN\\(", "span("); if (this.head > 0) { String[] lists = llmOutput.split("\\|"); From 1ef9502d961b2b398a9984c0671ee19db00dd65a Mon Sep 17 00:00:00 2001 From: Yuye Zhu Date: Fri, 26 Apr 2024 13:38:50 +0800 Subject: [PATCH 085/119] [Backport 2.x] feat: remove search index tool (#297) * remove search index tool Signed-off-by: yuye-aws * remove search index tool Signed-off-by: yuye-aws --------- Signed-off-by: yuye-aws --- .../java/org/opensearch/agent/ToolPlugin.java | 3 - .../agent/tools/AbstractRetrieverTool.java | 2 +- .../agent/tools/SearchIndexTool.java | 189 ------------------ .../agent/tools/SearchIndexToolTests.java | 183 ----------------- .../integTest/SearchIndexToolIT.java | 136 ------------- 
...ent_of_search_index_tool_request_body.json | 10 - 6 files changed, 1 insertion(+), 522 deletions(-) delete mode 100644 src/main/java/org/opensearch/agent/tools/SearchIndexTool.java delete mode 100644 src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java delete mode 100644 src/test/java/org/opensearch/integTest/SearchIndexToolIT.java delete mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index 1db7915e..0124aa7a 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -16,7 +16,6 @@ import org.opensearch.agent.tools.SearchAlertsTool; import org.opensearch.agent.tools.SearchAnomalyDetectorsTool; import org.opensearch.agent.tools.SearchAnomalyResultsTool; -import org.opensearch.agent.tools.SearchIndexTool; import org.opensearch.agent.tools.SearchMonitorsTool; import org.opensearch.agent.tools.VectorDBTool; import org.opensearch.agent.tools.VisualizationsTool; @@ -66,7 +65,6 @@ public Collection createComponents( VisualizationsTool.Factory.getInstance().init(client); NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); VectorDBTool.Factory.getInstance().init(client, xContentRegistry); - SearchIndexTool.Factory.getInstance().init(client, xContentRegistry); RAGTool.Factory.getInstance().init(client, xContentRegistry); SearchAlertsTool.Factory.getInstance().init(client); SearchAnomalyDetectorsTool.Factory.getInstance().init(client, namedWriteableRegistry); @@ -83,7 +81,6 @@ public List> getToolFactories() { NeuralSparseSearchTool.Factory.getInstance(), VectorDBTool.Factory.getInstance(), VisualizationsTool.Factory.getInstance(), - SearchIndexTool.Factory.getInstance(), RAGTool.Factory.getInstance(), SearchAlertsTool.Factory.getInstance(), 
SearchAnomalyDetectorsTool.Factory.getInstance(), diff --git a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java index 5003f0fa..f01dde7e 100644 --- a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java +++ b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java @@ -68,7 +68,7 @@ protected AbstractRetrieverTool( protected abstract String getQueryBody(String queryText); - public static Map processResponse(SearchHit hit) { + private static Map processResponse(SearchHit hit) { Map docContent = new HashMap<>(); docContent.put("_index", hit.getIndex()); docContent.put("_id", hit.getId()); diff --git a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java b/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java deleted file mode 100644 index f6d5a80a..00000000 --- a/src/main/java/org/opensearch/agent/tools/SearchIndexTool.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.tools; - -import static org.opensearch.ml.common.CommonValue.*; - -import java.io.IOException; -import java.security.AccessController; -import java.security.PrivilegedExceptionAction; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; - -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.client.Client; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.ml.common.spi.tools.Tool; -import org.opensearch.ml.common.spi.tools.ToolAnnotation; -import org.opensearch.ml.common.transport.connector.MLConnectorSearchAction; 
-import org.opensearch.ml.common.transport.model.MLModelSearchAction; -import org.opensearch.ml.common.transport.model_group.MLModelGroupSearchAction; -import org.opensearch.ml.common.utils.StringUtils; -import org.opensearch.search.SearchHit; -import org.opensearch.search.builder.SearchSourceBuilder; - -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import lombok.Getter; -import lombok.Setter; -import lombok.extern.log4j.Log4j2; - -@Getter -@Setter -@Log4j2 -@ToolAnnotation(SearchIndexTool.TYPE) -public class SearchIndexTool implements Tool { - - public static final String INPUT_FIELD = "input"; - public static final String INDEX_FIELD = "index"; - public static final String QUERY_FIELD = "query"; - - public static final String TYPE = "SearchIndexTool"; - private static final String DEFAULT_DESCRIPTION = - "Use this tool to search an index by providing two parameters: 'index' for the index name, and 'query' for the OpenSearch DSL formatted query. Only use this tool when a DSL query is available."; - - private String name = TYPE; - - private String description = DEFAULT_DESCRIPTION; - - private Client client; - - private NamedXContentRegistry xContentRegistry; - - public SearchIndexTool(Client client, NamedXContentRegistry xContentRegistry) { - this.client = client; - this.xContentRegistry = xContentRegistry; - } - - @Override - public String getType() { - return TYPE; - } - - @Override - public String getVersion() { - return null; - } - - @Override - public boolean validate(Map parameters) { - return parameters != null && parameters.containsKey(INPUT_FIELD) && parameters.get(INPUT_FIELD) != null; - } - - private SearchRequest getSearchRequest(String index, String query) throws IOException { - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - XContentParser queryParser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, query); - 
searchSourceBuilder.parseXContent(queryParser); - return new SearchRequest().source(searchSourceBuilder).indices(index); - } - - @Override - public void run(Map parameters, ActionListener listener) { - try { - String input = parameters.get(INPUT_FIELD); - JsonObject jsonObject = StringUtils.gson.fromJson(input, JsonObject.class); - String index = Optional.ofNullable(jsonObject).map(x -> x.get(INDEX_FIELD)).map(JsonElement::getAsString).orElse(null); - String query = Optional.ofNullable(jsonObject).map(x -> x.get(QUERY_FIELD)).map(JsonElement::toString).orElse(null); - if (index == null || query == null) { - listener.onFailure(new IllegalArgumentException("SearchIndexTool's two parameter: index and query are required!")); - return; - } - SearchRequest searchRequest = getSearchRequest(index, query); - - ActionListener actionListener = ActionListener.wrap(r -> { - SearchHit[] hits = r.getHits().getHits(); - - if (hits != null && hits.length > 0) { - StringBuilder contextBuilder = new StringBuilder(); - for (SearchHit hit : hits) { - String doc = AccessController.doPrivileged((PrivilegedExceptionAction) () -> { - Map docContent = AbstractRetrieverTool.processResponse(hit); - return StringUtils.gson.toJson(docContent); - }); - contextBuilder.append(doc).append("\n"); - } - listener.onResponse((T) contextBuilder.toString()); - } else { - listener.onResponse((T) ""); - } - }, e -> { - log.error("Failed to search index", e); - listener.onFailure(e); - }); - - // since searching connector and model needs access control, we need - // to forward the request corresponding transport action - if (Objects.equals(index, ML_CONNECTOR_INDEX)) { - client.execute(MLConnectorSearchAction.INSTANCE, searchRequest, actionListener); - } else if (Objects.equals(index, ML_MODEL_INDEX)) { - client.execute(MLModelSearchAction.INSTANCE, searchRequest, actionListener); - } else if (Objects.equals(index, ML_MODEL_GROUP_INDEX)) { - client.execute(MLModelGroupSearchAction.INSTANCE, searchRequest, 
actionListener); - } else { - client.search(searchRequest, actionListener); - } - } catch (Exception e) { - log.error("Failed to search index", e); - listener.onFailure(e); - } - } - - public static class Factory implements Tool.Factory { - - private Client client; - private static Factory INSTANCE; - - private NamedXContentRegistry xContentRegistry; - - /** - * Create or return the singleton factory instance - */ - public static Factory getInstance() { - if (INSTANCE != null) { - return INSTANCE; - } - synchronized (SearchIndexTool.class) { - if (INSTANCE != null) { - return INSTANCE; - } - INSTANCE = new Factory(); - return INSTANCE; - } - } - - public void init(Client client, NamedXContentRegistry xContentRegistry) { - this.client = client; - this.xContentRegistry = xContentRegistry; - } - - @Override - public SearchIndexTool create(Map params) { - return new SearchIndexTool(client, xContentRegistry); - } - - @Override - public String getDefaultDescription() { - return DEFAULT_DESCRIPTION; - } - - @Override - public String getDefaultType() { - return TYPE; - } - - @Override - public String getDefaultVersion() { - return null; - } - } -} diff --git a/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java deleted file mode 100644 index d228c0cb..00000000 --- a/src/test/java/org/opensearch/agent/tools/SearchIndexToolTests.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.tools; - -import static org.junit.Assert.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; - -import java.io.InputStream; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; - -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; -import 
org.opensearch.action.search.SearchResponse; -import org.opensearch.client.Client; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.Strings; -import org.opensearch.core.xcontent.DeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.ml.common.transport.connector.MLConnectorSearchAction; -import org.opensearch.ml.common.transport.model.MLModelSearchAction; -import org.opensearch.ml.common.transport.model_group.MLModelGroupSearchAction; -import org.opensearch.search.SearchModule; - -import lombok.SneakyThrows; - -public class SearchIndexToolTests { - static public final NamedXContentRegistry TEST_XCONTENT_REGISTRY_FOR_QUERY = new NamedXContentRegistry( - new SearchModule(Settings.EMPTY, List.of()).getNamedXContents() - ); - - private Client client; - - private SearchIndexTool mockedSearchIndexTool; - - private String mockedSearchResponseString; - - @Before - @SneakyThrows - public void setup() { - client = mock(Client.class); - mockedSearchIndexTool = Mockito - .mock( - SearchIndexTool.class, - Mockito.withSettings().useConstructor(client, TEST_XCONTENT_REGISTRY_FOR_QUERY).defaultAnswer(Mockito.CALLS_REAL_METHODS) - ); - - try (InputStream searchResponseIns = SearchIndexTool.class.getResourceAsStream("retrieval_tool_search_response.json")) { - if (searchResponseIns != null) { - mockedSearchResponseString = new String(searchResponseIns.readAllBytes()); - } - } - } - - @Test - @SneakyThrows - public void testGetType() { - String type = mockedSearchIndexTool.getType(); - assertFalse(Strings.isNullOrEmpty(type)); - assertEquals("SearchIndexTool", type); - } - - @Test - @SneakyThrows - public void testValidate() { - Map parameters = Map.of("input", "{}"); - assertTrue(mockedSearchIndexTool.validate(parameters)); - } - - @Test - @SneakyThrows - public void testValidateWithEmptyInput() 
{ - Map parameters = Map.of(); - assertFalse(mockedSearchIndexTool.validate(parameters)); - } - - @Test - public void testRunWithNormalIndex() { - String inputString = "{\"index\": \"test-index\", \"query\": {\"query\": {\"match_all\": {}}}}"; - Map parameters = Map.of("input", inputString); - mockedSearchIndexTool.run(parameters, null); - Mockito.verify(client, times(1)).search(any(), any()); - Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); - } - - @Test - public void testRunWithConnectorIndex() { - String inputString = "{\"index\": \".plugins-ml-connector\", \"query\": {\"query\": {\"match_all\": {}}}}"; - Map parameters = Map.of("input", inputString); - mockedSearchIndexTool.run(parameters, null); - Mockito.verify(client, never()).search(any(), any()); - Mockito.verify(client, times(1)).execute(eq(MLConnectorSearchAction.INSTANCE), any(), any()); - } - - @Test - public void testRunWithModelIndex() { - String inputString = "{\"index\": \".plugins-ml-model\", \"query\": {\"query\": {\"match_all\": {}}}}"; - Map parameters = Map.of("input", inputString); - mockedSearchIndexTool.run(parameters, null); - Mockito.verify(client, never()).search(any(), any()); - Mockito.verify(client, times(1)).execute(eq(MLModelSearchAction.INSTANCE), any(), any()); - } - - @Test - public void testRunWithModelGroupIndex() { - String inputString = "{\"index\": \".plugins-ml-model-group\", \"query\": {\"query\": {\"match_all\": {}}}}"; - Map parameters = Map.of("input", inputString); - mockedSearchIndexTool.run(parameters, null); - Mockito.verify(client, never()).search(any(), any()); - Mockito.verify(client, times(1)).execute(eq(MLModelGroupSearchAction.INSTANCE), any(), any()); - } - - @Test - @SneakyThrows - public void testRunWithSearchResults() { - SearchResponse mockedSearchResponse = SearchResponse - .fromXContent( - JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, mockedSearchResponseString) - ); 
- doAnswer(invocation -> { - ActionListener listener = invocation.getArgument(1); - listener.onResponse(mockedSearchResponse); - return null; - }).when(client).search(any(), any()); - - String inputString = "{\"index\": \"test-index\", \"query\": {\"query\": {\"match_all\": {}}}}"; - final CompletableFuture future = new CompletableFuture<>(); - ActionListener listener = ActionListener.wrap(r -> { future.complete(r); }, e -> { future.completeExceptionally(e); }); - Map parameters = Map.of("input", inputString); - mockedSearchIndexTool.run(parameters, listener); - - future.join(); - - Mockito.verify(client, times(1)).search(any(), any()); - Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); - } - - @Test - @SneakyThrows - public void testRunWithEmptyQuery() { - String inputString = "{\"index\": \"test_index\"}"; - Map parameters = Map.of("input", inputString); - ActionListener listener = mock(ActionListener.class); - mockedSearchIndexTool.run(parameters, listener); - Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); - Mockito.verify(client, Mockito.never()).search(any(), any()); - } - - @Test - public void testRunWithInvalidQuery() { - String inputString = "{\"index\": \"test-index\", \"query\": \"invalid query\"}"; - Map parameters = Map.of("input", inputString); - ActionListener listener = mock(ActionListener.class); - mockedSearchIndexTool.run(parameters, listener); - Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); - Mockito.verify(client, Mockito.never()).search(any(), any()); - } - - @Test - public void testRunWithEmptyQueryBody() { - String inputString = "{\"index\": \"test-index\", \"query\": {}}"; - Map parameters = Map.of("input", inputString); - mockedSearchIndexTool.run(parameters, null); - Mockito.verify(client, times(1)).search(any(), any()); - Mockito.verify(client, Mockito.never()).execute(any(), any(), any()); - } - - @Test - public void testFactory() { - SearchIndexTool searchIndexTool = 
SearchIndexTool.Factory.getInstance().create(Collections.emptyMap()); - assertEquals(SearchIndexTool.TYPE, searchIndexTool.getType()); - } -} diff --git a/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java b/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java deleted file mode 100644 index f989ebef..00000000 --- a/src/test/java/org/opensearch/integTest/SearchIndexToolIT.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.integTest; - -import static org.hamcrest.Matchers.containsString; - -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.List; - -import org.hamcrest.MatcherAssert; -import org.junit.After; -import org.junit.Before; -import org.opensearch.client.ResponseException; - -import lombok.SneakyThrows; - -public class SearchIndexToolIT extends BaseAgentToolsIT { - public static String TEST_INDEX_NAME = "test_index"; - private String registerAgentRequestBody; - - @SneakyThrows - private void prepareIndex() { - createIndexWithConfiguration( - TEST_INDEX_NAME, - "{\n" - + " \"mappings\": {\n" - + " \"properties\": {\n" - + " \"text\": {\n" - + " \"type\": \"text\"\n" - + " }\n" - + " }\n" - + " }\n" - + "}" - ); - addDocToIndex(TEST_INDEX_NAME, "0", List.of("text"), List.of("text doc 1")); - addDocToIndex(TEST_INDEX_NAME, "1", List.of("text"), List.of("text doc 2")); - addDocToIndex(TEST_INDEX_NAME, "2", List.of("text"), List.of("text doc 3")); - } - - @Before - @SneakyThrows - public void setUp() { - super.setUp(); - prepareIndex(); - registerAgentRequestBody = Files - .readString( - Path - .of( - this - .getClass() - .getClassLoader() - .getResource("org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json") - .toURI() - ) - ); - } - - @After - @SneakyThrows - public void tearDown() { - super.tearDown(); - deleteExternalIndices(); - } - - public void 
testSearchIndexToolInFlowAgent_withMatchAllQuery() { - String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\n" - + " \"parameters\": {\n" - + " \"input\": {\n" - + " \"index\": \"test_index\",\n" - + " \"query\": {\n" - + " \"query\": {\n" - + " \"match_all\": {}\n" - + " }\n" - + " }\n" - + " } \n" - + " }\n" - + "}\n"; - String result = executeAgent(agentId, agentInput); - assertEquals( - "The search index result not equal with expected.", - "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 1\"},\"_id\":\"0\",\"_score\":1.0}\n" - + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 2\"},\"_id\":\"1\",\"_score\":1.0}\n" - + "{\"_index\":\"test_index\",\"_source\":{\"text\":\"text doc 3\"},\"_id\":\"2\",\"_score\":1.0}\n", - result - ); - } - - public void testSearchIndexToolInFlowAgent_withEmptyIndexField_thenThrowException() { - String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\n" - + " \"parameters\": {\n" - + " \"input\": {\n" - + " \"query\": {\n" - + " \"query\": {\n" - + " \"match_all\": {}\n" - + " }\n" - + " }\n" - + " } \n" - + " }\n" - + "}\n"; - Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, agentInput)); - MatcherAssert.assertThat(exception.getMessage(), containsString("SearchIndexTool's two parameter: index and query are required!")); - } - - public void testSearchIndexToolInFlowAgent_withEmptyQueryField_thenThrowException() { - String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\n" - + " \"parameters\": {\n" - + " \"input\": {\n" - + " \"index\": \"test_index\"\n" - + " } \n" - + " }\n" - + "}\n"; - Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, agentInput)); - MatcherAssert.assertThat(exception.getMessage(), containsString("SearchIndexTool's two parameter: index and query are required!")); - } - - public void 
testSearchIndexToolInFlowAgent_withIllegalQueryField_thenThrowException() { - String agentId = createAgent(registerAgentRequestBody); - String agentInput = "{\n" - + " \"parameters\": {\n" - + " \"input\": {\n" - + " \"index\": \"test_index\",\n" - + " \"query\": \"Invalid Query\"\n" - + " } \n" - + " }\n" - + "}\n"; - Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, agentInput)); - MatcherAssert.assertThat(exception.getMessage(), containsString("ParsingException")); - } -} diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json deleted file mode 100644 index 52a67073..00000000 --- a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_search_index_tool_request_body.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "name": "Test_Search_Index_Agent", - "type": "flow", - "tools": [ - { - "type": "SearchIndexTool", - "description": "Use this tool to search an index by providing two parameters: 'index' for the index name, and 'query' for the OpenSearch DSL formatted query. Only use this tool when a DSL query is available." 
- } - ] -} \ No newline at end of file From 0f6c1bc482708b9941546502a954b24f9317eda9 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 10:15:22 +0800 Subject: [PATCH 086/119] Move visualization tool to ml-commons (#296) (#298) (cherry picked from commit 5588571ee57b2aa5d259ff6f3dc4166a1b39b945) Signed-off-by: Hailong Cui Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../java/org/opensearch/agent/ToolPlugin.java | 3 - .../agent/tools/VisualizationsTool.java | 178 ------------------ .../agent/tools/VisualizationsToolTests.java | 161 ---------------- .../integTest/VisualizationsToolIT.java | 114 ----------- .../opensearch/agent/tools/visualization.json | 58 ------ .../agent/tools/visualization_not_found.json | 18 -- 6 files changed, 532 deletions(-) delete mode 100644 src/main/java/org/opensearch/agent/tools/VisualizationsTool.java delete mode 100644 src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java delete mode 100644 src/test/java/org/opensearch/integTest/VisualizationsToolIT.java delete mode 100644 src/test/resources/org/opensearch/agent/tools/visualization.json delete mode 100644 src/test/resources/org/opensearch/agent/tools/visualization_not_found.json diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index 0124aa7a..db07ac0b 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -18,7 +18,6 @@ import org.opensearch.agent.tools.SearchAnomalyResultsTool; import org.opensearch.agent.tools.SearchMonitorsTool; import org.opensearch.agent.tools.VectorDBTool; -import org.opensearch.agent.tools.VisualizationsTool; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; @@ -62,7 +61,6 @@ 
public Collection createComponents( this.xContentRegistry = xContentRegistry; PPLTool.Factory.getInstance().init(client); - VisualizationsTool.Factory.getInstance().init(client); NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); VectorDBTool.Factory.getInstance().init(client, xContentRegistry); RAGTool.Factory.getInstance().init(client, xContentRegistry); @@ -80,7 +78,6 @@ public List> getToolFactories() { PPLTool.Factory.getInstance(), NeuralSparseSearchTool.Factory.getInstance(), VectorDBTool.Factory.getInstance(), - VisualizationsTool.Factory.getInstance(), RAGTool.Factory.getInstance(), SearchAlertsTool.Factory.getInstance(), SearchAnomalyDetectorsTool.Factory.getInstance(), diff --git a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java b/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java deleted file mode 100644 index 2fa6b996..00000000 --- a/src/main/java/org/opensearch/agent/tools/VisualizationsTool.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.tools; - -import java.util.Arrays; -import java.util.Locale; -import java.util.Map; -import java.util.Optional; - -import org.opensearch.ExceptionsHelper; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.client.Client; -import org.opensearch.client.Requests; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.Strings; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.ml.common.spi.tools.Tool; -import org.opensearch.ml.common.spi.tools.ToolAnnotation; -import org.opensearch.search.SearchHits; -import org.opensearch.search.builder.SearchSourceBuilder; - -import lombok.Builder; -import lombok.Getter; -import 
lombok.Setter; -import lombok.extern.log4j.Log4j2; - -@Log4j2 -@ToolAnnotation(VisualizationsTool.TYPE) -public class VisualizationsTool implements Tool { - public static final String NAME = "FindVisualizations"; - public static final String TYPE = "VisualizationTool"; - public static final String VERSION = "v1.0"; - - public static final String SAVED_OBJECT_TYPE = "visualization"; - - /** - * default number of visualizations returned - */ - private static final int DEFAULT_SIZE = 3; - private static final String DEFAULT_DESCRIPTION = - "Use this tool to find user created visualizations. This tool takes the visualization name as input and returns matching visualizations"; - @Setter - @Getter - private String description = DEFAULT_DESCRIPTION; - - @Getter - @Setter - private String name = NAME; - @Getter - @Setter - private String type = TYPE; - @Getter - private final String version = VERSION; - private final Client client; - @Getter - private final String index; - @Getter - private final int size; - - @Builder - public VisualizationsTool(Client client, String index, int size) { - this.client = client; - this.index = index; - this.size = size; - } - - @Override - public void run(Map parameters, ActionListener listener) { - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); - boolQueryBuilder.must().add(QueryBuilders.termQuery("type", SAVED_OBJECT_TYPE)); - boolQueryBuilder.must().add(QueryBuilders.matchQuery(SAVED_OBJECT_TYPE + ".title", parameters.get("input"))); - - SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource().query(boolQueryBuilder); - searchSourceBuilder.from(0).size(size); - SearchRequest searchRequest = Requests.searchRequest(index).source(searchSourceBuilder); - - client.search(searchRequest, new ActionListener<>() { - @Override - public void onResponse(SearchResponse searchResponse) { - SearchHits hits = searchResponse.getHits(); - StringBuilder visBuilder = new StringBuilder(); - visBuilder.append("Title,Id\n"); 
- if (hits.getTotalHits().value > 0) { - Arrays.stream(hits.getHits()).forEach(h -> { - String id = trimIdPrefix(h.getId()); - Map visMap = (Map) h.getSourceAsMap().get(SAVED_OBJECT_TYPE); - String title = visMap.get("title"); - visBuilder.append(String.format(Locale.ROOT, "%s,%s\n", title, id)); - }); - - listener.onResponse((T) visBuilder.toString()); - } else { - listener.onResponse((T) "No Visualization found"); - } - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof IndexNotFoundException) { - listener.onResponse((T) "No Visualization found"); - } else { - listener.onFailure(e); - } - } - }); - } - - String trimIdPrefix(String id) { - id = Optional.ofNullable(id).orElse(""); - if (id.startsWith(SAVED_OBJECT_TYPE)) { - String prefix = String.format(Locale.ROOT, "%s:", SAVED_OBJECT_TYPE); - return id.substring(prefix.length()); - } - return id; - } - - @Override - public boolean validate(Map parameters) { - return parameters.containsKey("input") && !Strings.isNullOrEmpty(parameters.get("input")); - } - - public static class Factory implements Tool.Factory { - private Client client; - - private static VisualizationsTool.Factory INSTANCE; - - public static VisualizationsTool.Factory getInstance() { - if (INSTANCE != null) { - return INSTANCE; - } - synchronized (VisualizationsTool.class) { - if (INSTANCE != null) { - return INSTANCE; - } - INSTANCE = new VisualizationsTool.Factory(); - return INSTANCE; - } - } - - public void init(Client client) { - this.client = client; - } - - @Override - public VisualizationsTool create(Map params) { - String index = params.get("index") == null ? ".kibana" : (String) params.get("index"); - String sizeStr = params.get("size") == null ? 
"3" : (String) params.get("size"); - int size; - try { - size = Integer.parseInt(sizeStr); - } catch (NumberFormatException ignored) { - size = DEFAULT_SIZE; - } - return VisualizationsTool.builder().client(client).index(index).size(size).build(); - } - - @Override - public String getDefaultDescription() { - return DEFAULT_DESCRIPTION; - } - - @Override - public String getDefaultType() { - return TYPE; - } - - @Override - public String getDefaultVersion() { - return null; - } - } -} diff --git a/src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java b/src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java deleted file mode 100644 index 9cd79ff9..00000000 --- a/src/test/java/org/opensearch/agent/tools/VisualizationsToolTests.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.tools; - -import static org.junit.Assert.assertEquals; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.CompletableFuture; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.ArgumentMatchers; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.MockitoAnnotations; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.client.Client; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.xcontent.DeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.ml.common.spi.tools.Tool; - -public class VisualizationsToolTests { - @Mock - private Client client; - - private String 
searchResponse = "{}"; - private String searchResponseNotFound = "{}"; - - @Before - public void setup() throws IOException { - MockitoAnnotations.openMocks(this); - VisualizationsTool.Factory.getInstance().init(client); - try (InputStream searchResponseIns = VisualizationsToolTests.class.getResourceAsStream("visualization.json")) { - if (searchResponseIns != null) { - searchResponse = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); - } - } - try (InputStream searchResponseIns = VisualizationsToolTests.class.getResourceAsStream("visualization_not_found.json")) { - if (searchResponseIns != null) { - searchResponseNotFound = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); - } - } - } - - @Test - public void testToolIndexName() { - VisualizationsTool tool1 = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); - assertEquals(tool1.getIndex(), ".kibana"); - - VisualizationsTool tool2 = VisualizationsTool.Factory.getInstance().create(Map.of("index", "test-index")); - assertEquals(tool2.getIndex(), "test-index"); - } - - @Test - public void testNumberOfVisualizationReturned() { - VisualizationsTool tool1 = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); - assertEquals(tool1.getSize(), 3); - - VisualizationsTool tool2 = VisualizationsTool.Factory.getInstance().create(Map.of("size", "1")); - assertEquals(tool2.getSize(), 1); - - VisualizationsTool tool3 = VisualizationsTool.Factory.getInstance().create(Map.of("size", "badString")); - assertEquals(tool3.getSize(), 3); - } - - @Test - public void testTrimPrefix() { - VisualizationsTool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); - assertEquals(tool.trimIdPrefix(null), ""); - assertEquals(tool.trimIdPrefix("abc"), "abc"); - assertEquals(tool.trimIdPrefix("visualization:abc"), "abc"); - } - - @Test - public void testParameterValidation() { - VisualizationsTool tool = 
VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); - Assert.assertFalse(tool.validate(Collections.emptyMap())); - Assert.assertFalse(tool.validate(Map.of("input", ""))); - Assert.assertTrue(tool.validate(Map.of("input", "question"))); - } - - @Test - public void testRunToolWithVisualizationFound() throws Exception { - Tool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); - final CompletableFuture future = new CompletableFuture<>(); - ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); - - ArgumentCaptor> searchResponseListener = ArgumentCaptor.forClass(ActionListener.class); - Mockito.doNothing().when(client).search(ArgumentMatchers.any(SearchRequest.class), searchResponseListener.capture()); - - Map params = Map.of("input", "Sales by gender"); - - tool.run(params, listener); - - SearchResponse response = SearchResponse - .fromXContent( - JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, searchResponse) - ); - searchResponseListener.getValue().onResponse(response); - - future.join(); - assertEquals("Title,Id\n[Ecommerce]Sales by gender,aeb212e0-4c84-11e8-b3d7-01146121b73d\n", future.get()); - } - - @Test - public void testRunToolWithNoVisualizationFound() throws Exception { - Tool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); - final CompletableFuture future = new CompletableFuture<>(); - ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); - - ArgumentCaptor> searchResponseListener = ArgumentCaptor.forClass(ActionListener.class); - Mockito.doNothing().when(client).search(ArgumentMatchers.any(SearchRequest.class), searchResponseListener.capture()); - - Map params = Map.of("input", "Sales by gender"); - - tool.run(params, listener); - - SearchResponse response = SearchResponse - .fromXContent( - JsonXContent.jsonXContent - 
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, searchResponseNotFound) - ); - searchResponseListener.getValue().onResponse(response); - - future.join(); - assertEquals("No Visualization found", future.get()); - } - - @Test - public void testRunToolWithIndexNotExists() throws Exception { - Tool tool = VisualizationsTool.Factory.getInstance().create(Collections.emptyMap()); - final CompletableFuture future = new CompletableFuture<>(); - ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); - - ArgumentCaptor> searchResponseListener = ArgumentCaptor.forClass(ActionListener.class); - Mockito.doNothing().when(client).search(ArgumentMatchers.any(SearchRequest.class), searchResponseListener.capture()); - - Map params = Map.of("input", "Sales by gender"); - - tool.run(params, listener); - - IndexNotFoundException notFoundException = new IndexNotFoundException("test-index"); - searchResponseListener.getValue().onFailure(notFoundException); - - future.join(); - assertEquals("No Visualization found", future.get()); - } -} diff --git a/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java b/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java deleted file mode 100644 index 2bf0e611..00000000 --- a/src/test/java/org/opensearch/integTest/VisualizationsToolIT.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.integTest; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.UUID; - -import org.junit.Assert; -import org.opensearch.agent.tools.VisualizationsTool; -import org.opensearch.client.Request; -import org.opensearch.client.Response; -import org.opensearch.core.rest.RestStatus; - -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; - -import lombok.extern.log4j.Log4j2; - -@Log4j2 
-public class VisualizationsToolIT extends ToolIntegrationTest { - @Override - List promptHandlers() { - return List.of(new PromptHandler() { - @Override - LLMThought llmThought() { - return LLMThought - .builder() - .action(VisualizationsTool.TYPE) - .actionInput("RAM") - .question("can you show me RAM info with visualization?") - .build(); - } - }, new PromptHandler() { - @Override - LLMThought llmThought() { - return LLMThought - .builder() - .action(VisualizationsTool.TYPE) - .actionInput("sales") - .question("how about the sales about this month?") - .build(); - } - }); - } - - String toolType() { - return VisualizationsTool.TYPE; - } - - public void testVisualizationNotFound() throws IOException { - Request request = new Request("POST", "/_plugins/_ml/agents/" + agentId + "/_execute"); - request.setJsonEntity("{\"parameters\":{\"question\":\"can you show me RAM info with visualization?\"}}"); - Response response = executeRequest(request); - String responseStr = readResponse(response); - String toolOutput = extractAdditionalInfo(responseStr); - Assert.assertEquals("No Visualization found", toolOutput); - } - - public void testVisualizationFound() throws IOException { - String title = "[eCommerce] Sales by Category"; - String id = UUID.randomUUID().toString(); - prepareVisualization(title, id); - Request request = new Request("POST", "/_plugins/_ml/agents/" + agentId + "/_execute"); - request.setJsonEntity("{\"parameters\":{\"question\":\"how about the sales about this month?\"}}"); - Response response = executeRequest(request); - String responseStr = readResponse(response); - String toolOutput = extractAdditionalInfo(responseStr); - Assert.assertEquals("Title,Id\n" + String.format(Locale.ROOT, "%s,%s\n", title, id), toolOutput); - } - - private void prepareVisualization(String title, String id) { - String body = "{\n" - + " \"visualization\": {\n" - + " \"title\": \"" - + title - + "\"\n" - + " },\n" - + " \"type\": \"visualization\"\n" - + "}"; - Response 
response = makeRequest(client(), "POST", String.format(Locale.ROOT, ".kibana/_doc/%s?refresh=true", id), null, body, null); - Assert.assertEquals(response.getStatusLine().getStatusCode(), RestStatus.CREATED.getStatus()); - } - - private String extractAdditionalInfo(String responseStr) { - JsonArray output = JsonParser - .parseString(responseStr) - .getAsJsonObject() - .get("inference_results") - .getAsJsonArray() - .get(0) - .getAsJsonObject() - .get("output") - .getAsJsonArray(); - for (JsonElement element : output) { - if ("response".equals(element.getAsJsonObject().get("name").getAsString())) { - return element - .getAsJsonObject() - .get("dataAsMap") - .getAsJsonObject() - .get("additional_info") - .getAsJsonObject() - .get(String.format(Locale.ROOT, "%s.output", toolType())) - .getAsString(); - } - } - return null; - } -} diff --git a/src/test/resources/org/opensearch/agent/tools/visualization.json b/src/test/resources/org/opensearch/agent/tools/visualization.json deleted file mode 100644 index 8901706e..00000000 --- a/src/test/resources/org/opensearch/agent/tools/visualization.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "took": 4, - "timed_out": false, - "_shards": { - "total": 1, - "successful": 1, - "skipped": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 1, - "relation": "eq" - }, - "max_score": 0.2847877, - "hits": [ - { - "_index": ".kibana_1", - "_id": "visualization:aeb212e0-4c84-11e8-b3d7-01146121b73d", - "_score": 0.2847877, - "_source": { - "visualization": { - "title": "[Ecommerce]Sales by gender", - "visState": "", - "uiStateJSON": "{}", - "description": "", - "version": 1, - "kibanaSavedObjectMeta": { - "searchSourceJSON": "{}" - } - }, - "type": "visualization", - "references": [ - { - "name": "control_0_index_pattern", - "type": "index-pattern", - "id": "d3d7af60-4c81-11e8-b3d7-01146121b73d" - }, - { - "name": "control_1_index_pattern", - "type": "index-pattern", - "id": "d3d7af60-4c81-11e8-b3d7-01146121b73d" - }, - { - "name": 
"control_2_index_pattern", - "type": "index-pattern", - "id": "d3d7af60-4c81-11e8-b3d7-01146121b73d" - } - ], - "migrationVersion": { - "visualization": "7.10.0" - }, - "updated_at": "2023-11-10T02:50:24.881Z" - } - } - ] - } -} diff --git a/src/test/resources/org/opensearch/agent/tools/visualization_not_found.json b/src/test/resources/org/opensearch/agent/tools/visualization_not_found.json deleted file mode 100644 index 40a0e9d3..00000000 --- a/src/test/resources/org/opensearch/agent/tools/visualization_not_found.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "took": 1, - "timed_out": false, - "_shards": { - "total": 1, - "successful": 1, - "skipped": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 0, - "relation": "eq" - }, - "max_score": null, - "hits": [] - } -} From fc352a9c3969597da4d2142aaeb75837266c45d3 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Tue, 30 Apr 2024 14:04:32 +0800 Subject: [PATCH 087/119] Add 2.14 release notes (#299) Signed-off-by: zane-neo --- .../opensearch-skills.release-notes-2.14.0.0.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 release-notes/opensearch-skills.release-notes-2.14.0.0.md diff --git a/release-notes/opensearch-skills.release-notes-2.14.0.0.md b/release-notes/opensearch-skills.release-notes-2.14.0.0.md new file mode 100644 index 00000000..5617b850 --- /dev/null +++ b/release-notes/opensearch-skills.release-notes-2.14.0.0.md @@ -0,0 +1,13 @@ +# 2024-04-29 Version 2.14.0.0 + +Compatible with OpenSearch 2.14.0 + +### Features +* Fix filter fields, adding geo point and date_nanos (#285) (#286) +* Change ad plugin jar dependency (#288) +* Remove logic about replace quota for finetuning model (#289) (#291) +* Move search index tool to ml-commons repo (#297) +* Move visualization tool to ml-commons (#296) (#298) + +### Dependencies +* Increment byte-buddy version to 1.14.9 (#288) From 1ddf2cdc09568ff0da13a5a25207f37ebc90a485 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" 
<98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 30 Apr 2024 22:36:33 +0800 Subject: [PATCH 088/119] add error message for NPE (#301) (#304) * add error message for NPE * apply spotless * Update src/main/java/org/opensearch/agent/tools/PPLTool.java * fix spot less --------- (cherry picked from commit 05def20b8bc7d829daedfffdc2ea4029259d05ad) Signed-off-by: xinyual Signed-off-by: Xinyuan Lu <74362153+xinyual@users.noreply.github.com> Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: zane-neo --- src/main/java/org/opensearch/agent/tools/PPLTool.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index b1154b1c..25201fe1 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -386,6 +386,11 @@ private String constructTableInfo(SearchHit[] searchHits, Map mappingSource = (Map) mappingMetadata.getSourceAsMap().get("properties"); + if (Objects.isNull(mappingSource)) { + throw new IllegalArgumentException( + "The querying index doesn't have mapping metadata, please add data to it or using another index." 
+ ); + } Map fieldsToType = new HashMap<>(); extractNamesTypes(mappingSource, fieldsToType, ""); StringJoiner tableInfoJoiner = new StringJoiner("\n"); From 37beb3b46027ce952917bc8ae2f48e802e9c47f1 Mon Sep 17 00:00:00 2001 From: opensearch-ci-bot Date: Tue, 30 Apr 2024 00:08:03 +0000 Subject: [PATCH 089/119] Increment version to 2.14.0-SNAPSHOT Signed-off-by: opensearch-ci-bot --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 6d8a2e76..0b28efe6 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.13.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.14.0-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") version_tokens = opensearch_version.tokenize('-') From 8ee946be9d9cc83aabf2782f6cc4778f4a6fc519 Mon Sep 17 00:00:00 2001 From: Daniel Widdis Date: Wed, 1 May 2024 13:06:10 -0700 Subject: [PATCH 090/119] Change AD jar name for 2.14 compatibility (#308) Signed-off-by: Daniel Widdis --- build.gradle | 4 ++-- .../agent/tools/SearchAnomalyDetectorsToolTests.java | 7 +++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/build.gradle b/build.gradle index 0b28efe6..26cc1083 100644 --- a/build.gradle +++ b/build.gradle @@ -107,7 +107,7 @@ task addJarsToClasspath(type: Copy) { into("$buildDir/classes") from(fileTree(dir: adJarDirectory)) { - include "opensearch-time-series-analytics-${opensearch_build}.jar" + include "opensearch-anomaly-detection-${opensearch_build}.jar" } into("$buildDir/classes") } @@ -124,7 +124,7 @@ dependencies { // Plugin dependencies compileOnly group: 'org.opensearch', name:'opensearch-ml-client', version: "${opensearch_build}" implementation 
fileTree(dir: jsJarDirectory, include: ["opensearch-job-scheduler-${opensearch_build}.jar"]) - implementation fileTree(dir: adJarDirectory, include: ["opensearch-time-series-analytics-${opensearch_build}.jar"]) + implementation fileTree(dir: adJarDirectory, include: ["opensearch-anomaly-detection-${opensearch_build}.jar"]) implementation fileTree(dir: sqlJarDirectory, include: ["opensearch-sql-${opensearch_build}.jar", "ppl-${opensearch_build}.jar", "protocol-${opensearch_build}.jar"]) compileOnly "org.opensearch:common-utils:${opensearch_build}" compileOnly "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index 8a316930..8bf4de56 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -32,6 +32,7 @@ import org.opensearch.action.ActionType; import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.model.AnomalyDetector; +import org.opensearch.ad.model.IntervalTimeConfiguration; import org.opensearch.ad.transport.GetAnomalyDetectorAction; import org.opensearch.ad.transport.GetAnomalyDetectorResponse; import org.opensearch.ad.transport.SearchAnomalyDetectorAction; @@ -45,7 +46,6 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.search.SearchHit; -import org.opensearch.timeseries.model.IntervalTimeConfiguration; public class SearchAnomalyDetectorsToolTests { @Mock @@ -85,7 +85,6 @@ public void setup() { Instant.now(), Collections.emptyList(), null, - null, null ); } @@ -126,7 +125,7 @@ public void testRunWithSingleAnomalyDetector() throws Exception { content.field("last_update_time", testDetector.getLastUpdateTime().toEpochMilli()); content.endObject(); SearchHit[] hits = new 
SearchHit[1]; - hits[0] = new SearchHit(0, testDetector.getId(), null, null).sourceRef(BytesReference.bytes(content)); + hits[0] = new SearchHit(0, testDetector.getDetectorId(), null, null).sourceRef(BytesReference.bytes(content)); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); String expectedResponseStr = getExpectedResponseString(testDetector); @@ -461,7 +460,7 @@ private String getExpectedResponseString(AnomalyDetector testDetector) { return String .format( "AnomalyDetectors=[{id=%s,name=%s,type=%s,description=%s,index=%s,lastUpdateTime=%d}]TotalAnomalyDetectors=%d", - testDetector.getId(), + testDetector.getDetectorId(), testDetector.getName(), testDetector.getDetectorType(), testDetector.getDescription(), From e2da02d25797f5d808430b7062a1d10ef64c9971 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jun 2024 08:19:47 +0800 Subject: [PATCH 091/119] Increment version to 2.15.0-SNAPSHOT (#310) Signed-off-by: opensearch-ci-bot Co-authored-by: opensearch-ci-bot --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 26cc1083..415751d3 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.14.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.15.0-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") version_tokens = opensearch_version.tokenize('-') From bb32961afecd90a0bdfea0d13bc4d9b7a3218ed1 Mon Sep 17 00:00:00 2001 From: Hailong Cui Date: Thu, 6 Jun 2024 16:13:26 +0800 Subject: [PATCH 092/119] Fix build error due to upstream change (#316) (#317) * fix build error due 
to upstream change Signed-off-by: Hailong Cui * failed unit test Signed-off-by: Hailong Cui * fix compile error Signed-off-by: Hailong Cui * fix flaky test for locale mr Signed-off-by: Hailong Cui * force to use x86_64 for macos Signed-off-by: Hailong Cui --------- Signed-off-by: Hailong Cui (cherry picked from commit 62ac87fa63a41fa460ae3795cdeaf1db927d1588) --- .github/workflows/ci.yml | 2 +- .../agent/tools/SearchAlertsTool.java | 3 +- .../tools/SearchAnomalyDetectorsTool.java | 4 +- .../SearchAnomalyDetectorsToolTests.java | 14 ++++-- .../SearchAnomalyDetectorsToolIT.java | 43 ++++++++++--------- 5 files changed, 38 insertions(+), 28 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa65ae37..c5ba123a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -87,7 +87,7 @@ jobs: export FC=/usr/local/Cellar/gcc/12.2.0/bin/gfortran - name: Run build run: | - ./gradlew build + ./gradlew build -Dos.arch=x86_64 build-windows: strategy: diff --git a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java index 5abd6121..7be36955 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAlertsTool.java @@ -104,7 +104,8 @@ public void run(Map parameters, ActionListener listener) alertIndex, monitorIds, workflowIds, - alertIds + alertIds, + null ); // create response listener diff --git a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java index e4a8e89d..c8e4ab8a 100644 --- a/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java +++ b/src/main/java/org/opensearch/agent/tools/SearchAnomalyDetectorsTool.java @@ -20,7 +20,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.client.AnomalyDetectionNodeClient; import 
org.opensearch.ad.model.ADTask; -import org.opensearch.ad.transport.GetAnomalyDetectorRequest; import org.opensearch.ad.transport.GetAnomalyDetectorResponse; import org.opensearch.agent.tools.utils.ToolConstants; import org.opensearch.agent.tools.utils.ToolConstants.DetectorStateString; @@ -41,6 +40,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortOrder; +import org.opensearch.timeseries.transport.GetConfigRequest; import lombok.Getter; import lombok.Setter; @@ -164,7 +164,7 @@ public void run(Map parameters, ActionListener listener) listener.onFailure(e); }); - GetAnomalyDetectorRequest profileRequest = new GetAnomalyDetectorRequest( + GetConfigRequest profileRequest = new GetConfigRequest( hit.getId(), Versions.MATCH_ANY, false, diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index 8bf4de56..407690f6 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -32,7 +32,6 @@ import org.opensearch.action.ActionType; import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.IntervalTimeConfiguration; import org.opensearch.ad.transport.GetAnomalyDetectorAction; import org.opensearch.ad.transport.GetAnomalyDetectorResponse; import org.opensearch.ad.transport.SearchAnomalyDetectorAction; @@ -46,6 +45,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.search.SearchHit; +import org.opensearch.timeseries.model.IntervalTimeConfiguration; public class SearchAnomalyDetectorsToolTests { @Mock @@ -85,6 +85,14 @@ public void setup() { Instant.now(), Collections.emptyList(), null, + null, + null, + null, 
+ null, + null, + null, + null, + null, null ); } @@ -125,7 +133,7 @@ public void testRunWithSingleAnomalyDetector() throws Exception { content.field("last_update_time", testDetector.getLastUpdateTime().toEpochMilli()); content.endObject(); SearchHit[] hits = new SearchHit[1]; - hits[0] = new SearchHit(0, testDetector.getDetectorId(), null, null).sourceRef(BytesReference.bytes(content)); + hits[0] = new SearchHit(0, testDetector.getId(), null, null).sourceRef(BytesReference.bytes(content)); SearchResponse getDetectorsResponse = TestHelpers.generateSearchResponse(hits); String expectedResponseStr = getExpectedResponseString(testDetector); @@ -460,7 +468,7 @@ private String getExpectedResponseString(AnomalyDetector testDetector) { return String .format( "AnomalyDetectors=[{id=%s,name=%s,type=%s,description=%s,index=%s,lastUpdateTime=%d}]TotalAnomalyDetectors=%d", - testDetector.getDetectorId(), + testDetector.getId(), testDetector.getName(), testDetector.getDetectorType(), testDetector.getDescription(), diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java index 7fdc593c..eb0c529d 100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyDetectorsToolIT.java @@ -8,6 +8,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.List; +import java.util.Locale; import org.junit.After; import org.junit.Before; @@ -71,9 +72,9 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNameParam() { String agentInput2 = "{\"parameters\":{\"detectorName\": \"" + detectorName + "\"}}"; String result2 = executeAgent(agentId, agentInput2); - assertTrue(result2.contains(String.format("id=%s", detectorId))); - assertTrue(result2.contains(String.format("name=%s", detectorName))); - assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + 
assertTrue(result2.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + assertTrue(result2.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(result2.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); } finally { if (detectorId != null) { deleteDetector(detectorId); @@ -95,9 +96,9 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_detectorNamePatternParam() String agentInput2 = "{\"parameters\":{\"detectorNamePattern\": \"" + detectorName + "*" + "\"}}"; String result2 = executeAgent(agentId, agentInput2); - assertTrue(result2.contains(String.format("id=%s", detectorId))); - assertTrue(result2.contains(String.format("name=%s", detectorName))); - assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result2.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + assertTrue(result2.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(result2.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); } finally { if (detectorId != null) { deleteDetector(detectorId); @@ -120,7 +121,7 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_indicesParam() { String agentInput2 = "{\"parameters\":{\"indices\": \"test-index\"}}"; String result2 = executeAgent(agentId, agentInput2); - assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result2.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); } finally { if (detectorId != null) { deleteDetector(detectorId); @@ -143,9 +144,9 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_highCardinalityParam() { String agentInput2 = "{\"parameters\":{\"highCardinality\": \"false\"}}"; String result2 = executeAgent(agentId, agentInput2); - assertTrue(result2.contains(String.format("id=%s", detectorId))); - assertTrue(result2.contains(String.format("name=%s", detectorName))); - 
assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result2.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + assertTrue(result2.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(result2.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); } finally { if (detectorId != null) { deleteDetector(detectorId); @@ -172,45 +173,45 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_detectorStateParams() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"running\": \"true\"}}"; String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); assertTrue(result.contains(detectorIdRunning)); String agentInput2 = "{\"parameters\":{\"running\": \"false\"}}"; String result2 = executeAgent(agentId, agentInput2); - assertTrue(result2.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(result2.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 2))); assertTrue(result2.contains(detectorIdDisabled1)); assertTrue(result2.contains(detectorIdDisabled2)); String agentInput3 = "{\"parameters\":{\"failed\": \"true\"}}"; String result3 = executeAgent(agentId, agentInput3); - assertTrue(result3.contains(String.format("TotalAnomalyDetectors=%d", 0))); + assertTrue(result3.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 0))); String agentInput4 = "{\"parameters\":{\"failed\": \"false\"}}"; String result4 = executeAgent(agentId, agentInput4); - assertTrue(result4.contains(String.format("TotalAnomalyDetectors=%d", 3))); + assertTrue(result4.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 3))); assertTrue(result4.contains(detectorIdRunning)); assertTrue(result4.contains(detectorIdDisabled1)); assertTrue(result4.contains(detectorIdDisabled2)); 
String agentInput5 = "{\"parameters\":{\"running\": \"true\", \"failed\": \"true\"}}"; String result5 = executeAgent(agentId, agentInput5); - assertTrue(result5.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result5.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); assertTrue(result5.contains(detectorIdRunning)); String agentInput6 = "{\"parameters\":{\"running\": \"true\", \"failed\": \"false\"}}"; String result6 = executeAgent(agentId, agentInput6); - assertTrue(result6.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result6.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); assertTrue(result6.contains(detectorIdRunning)); String agentInput7 = "{\"parameters\":{\"running\": \"false\", \"failed\": \"true\"}}"; String result7 = executeAgent(agentId, agentInput7); - assertTrue(result7.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(result7.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 2))); assertTrue(result7.contains(detectorIdDisabled1)); assertTrue(result7.contains(detectorIdDisabled2)); String agentInput8 = "{\"parameters\":{\"running\": \"false\", \"failed\": \"false\"}}"; String result8 = executeAgent(agentId, agentInput8); - assertTrue(result8.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(result8.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 2))); assertTrue(result8.contains(detectorIdDisabled1)); assertTrue(result8.contains(detectorIdDisabled2)); } finally { @@ -243,9 +244,9 @@ public void testSearchAnomalyDetectorsToolInFlowAgent_complexParams() { + detectorName + "\", \"highCardinality\": false, \"sortOrder\": \"asc\", \"sortString\": \"name.keyword\", \"size\": 10, \"startIndex\": 0 }}"; String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("id=%s", detectorId))); - assertTrue(result.contains(String.format("name=%s", detectorName))); - 
assertTrue(result.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(result.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + assertTrue(result.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(result.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); } finally { if (detectorId != null) { deleteDetector(detectorId); From a544e37098529a7b55edb650a59bea466a4ea642 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Tue, 11 Jun 2024 10:38:46 +0800 Subject: [PATCH 093/119] add release note (#320) Signed-off-by: zane-neo --- release-notes/opensearch-skills.release-notes-2.15.0.0.md | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 release-notes/opensearch-skills.release-notes-2.15.0.0.md diff --git a/release-notes/opensearch-skills.release-notes-2.15.0.0.md b/release-notes/opensearch-skills.release-notes-2.15.0.0.md new file mode 100644 index 00000000..f8a03999 --- /dev/null +++ b/release-notes/opensearch-skills.release-notes-2.15.0.0.md @@ -0,0 +1,6 @@ +# 2024-06-11 Version 2.15.0.0 + +Compatible with OpenSearch 2.15.0 + +### Maintenance +Increment version to 2.15.0.0. 
From 9e572a8d57ef7004dd72b695916a57e2127618b3 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 12 Jun 2024 11:33:05 +0800 Subject: [PATCH 094/119] Fix it failure (#321) (#322) * Fix IT failures * format code * fix * nit --------- (cherry picked from commit 5362a9abb96ac8aa99d907c5c6a169b5da96579f) Signed-off-by: zane-neo Signed-off-by: zhichao-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: zhichao-aws --- .../integTest/NeuralSparseSearchToolIT.java | 5 ++++- .../java/org/opensearch/integTest/RAGToolIT.java | 14 ++++++++++---- .../org/opensearch/integTest/VectorDBToolIT.java | 2 +- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java index f6575c09..3758c84d 100644 --- a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java +++ b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java @@ -145,7 +145,10 @@ public void testNeuralSparseSearchToolInFlowAgent_withIllegalEmbeddingField_then org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + allOf( + containsString("[neural_sparse] query only works on [rank_features] fields"), + containsString("IllegalArgumentException") + ) ); } diff --git a/src/test/java/org/opensearch/integTest/RAGToolIT.java b/src/test/java/org/opensearch/integTest/RAGToolIT.java index a444e7dd..eae20755 100644 --- a/src/test/java/org/opensearch/integTest/RAGToolIT.java +++ b/src/test/java/org/opensearch/integTest/RAGToolIT.java @@ -354,7 +354,10 @@ public void testRAGToolWithNeuralSparseQuery_withIllegalEmbeddingField_thenThrow org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("all shards failed"), 
containsString("SearchPhaseExecutionException")) + allOf( + containsString("[neural_sparse] query only works on [rank_features] fields"), + containsString("IllegalArgumentException") + ) ); } @@ -365,7 +368,10 @@ public void testRAGToolWithNeuralSparseQueryAndLLM_withIllegalEmbeddingField_the org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + allOf( + containsString("[neural_sparse] query only works on [rank_features] fields"), + containsString("IllegalArgumentException") + ) ); } @@ -376,7 +382,7 @@ public void testRAGToolWithNeuralQuery_withIllegalEmbeddingField_thenThrowExcept org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + allOf(containsString("Field 'embedding2' is not knn_vector type."), containsString("IllegalArgumentException")) ); } @@ -387,7 +393,7 @@ public void testRAGToolWithNeuralQueryAndLLM_withIllegalEmbeddingField_thenThrow org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + allOf(containsString("Field 'embedding2' is not knn_vector type."), containsString("IllegalArgumentException")) ); } diff --git a/src/test/java/org/opensearch/integTest/VectorDBToolIT.java b/src/test/java/org/opensearch/integTest/VectorDBToolIT.java index 668564dc..c494371a 100644 --- a/src/test/java/org/opensearch/integTest/VectorDBToolIT.java +++ b/src/test/java/org/opensearch/integTest/VectorDBToolIT.java @@ -174,7 +174,7 @@ public void testVectorDBToolInFlowAgent_withIllegalEmbeddingField_thenThrowExcep org.hamcrest.MatcherAssert .assertThat( exception.getMessage(), - allOf(containsString("all shards failed"), containsString("SearchPhaseExecutionException")) + allOf(containsString("Field 'embedding2' is not knn_vector type."), 
containsString("IllegalArgumentException")) ); } From 51e58c534c51a3a531b93200a9a245fe0941689c Mon Sep 17 00:00:00 2001 From: zane-neo Date: Fri, 14 Jun 2024 11:50:17 +0800 Subject: [PATCH 095/119] Fix search alert IT failure (#329) * Fix search alert IT failure Signed-off-by: zane-neo * format code Signed-off-by: zane-neo --------- Signed-off-by: zane-neo --- .../integTest/BaseAgentToolsIT.java | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index 3ee1a406..d24afd04 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -8,6 +8,7 @@ import java.io.IOException; import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.function.Predicate; @@ -235,10 +236,13 @@ protected void deleteModel(String modelId) { } protected void createIndexWithConfiguration(String indexName, String indexConfiguration) throws Exception { - Response response = makeRequest(client(), "PUT", indexName, null, indexConfiguration, null); - Map responseInMap = parseResponseToMap(response); - assertEquals("true", responseInMap.get("acknowledged").toString()); - assertEquals(indexName, responseInMap.get("index").toString()); + boolean indexExists = indexExists(indexName); + if (!indexExists) { + Response response = makeRequest(client(), "PUT", indexName, null, indexConfiguration, null); + Map responseInMap = parseResponseToMap(response); + assertEquals("true", responseInMap.get("acknowledged").toString()); + assertEquals(indexName, responseInMap.get("index").toString()); + } } protected void createIngestPipelineWithConfiguration(String pipelineName, String body) throws Exception { @@ -274,7 +278,13 @@ protected void deleteSystemIndices() throws IOException { 
.collect(Collectors.toList()); for (final String indexName : externalIndices) { - adminClient().performRequest(new Request("DELETE", "/" + indexName)); + Response deleteResponse = adminClient().performRequest(new Request("DELETE", "/" + indexName)); + Map responseInMap = parseResponseToMap(deleteResponse); + assertEquals( + String.format(Locale.ROOT, "delete index %s failed with response: %s", indexName, gson.toJson(responseInMap)), + "true", + responseInMap.get("acknowledged").toString() + ); } } } From 30bd9898351e9064f46a25f27ff27131f5257866 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Fri, 14 Jun 2024 16:07:58 +0800 Subject: [PATCH 096/119] Fix flaky ITs (#332) * Fix flaky ITs Signed-off-by: zane-neo * fix compilation error Signed-off-by: zane-neo --------- Signed-off-by: zane-neo --- .../SearchAnomalyDetectorsToolTests.java | 54 +++++++++---------- .../integTest/SearchAnomalyResultsToolIT.java | 5 +- .../integTest/SearchMonitorsToolIT.java | 13 ++--- 3 files changed, 38 insertions(+), 34 deletions(-) diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index 407690f6..e0b04336 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -172,9 +172,9 @@ public void testRunWithRunningDetectorTrue() throws Exception { ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); - assertTrue(response.contains(String.format("id=%s", detectorId))); - assertTrue(response.contains(String.format("name=%s", detectorName))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + 
assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(response.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -220,9 +220,9 @@ public void testRunWithRunningDetectorUndefined() throws Exception { ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); - assertTrue(response.contains(String.format("id=%s", detectorId))); - assertTrue(response.contains(String.format("name=%s", detectorName))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(response.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -248,9 +248,9 @@ public void testRunWithNullRealtimeTask() throws Exception { ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); - assertTrue(response.contains(String.format("id=%s", detectorId))); - assertTrue(response.contains(String.format("name=%s", detectorName))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(response.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -275,9 +275,9 @@ public void testRunWithTaskStateCreated() throws Exception { ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = 
responseCaptor.getValue(); - assertTrue(response.contains(String.format("id=%s", detectorId))); - assertTrue(response.contains(String.format("name=%s", detectorName))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName))); + assertTrue(response.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -309,13 +309,13 @@ public void testRunWithTaskStateVariousFailed() throws Exception { ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); - assertTrue(response.contains(String.format("id=%s", detectorId1))); - assertTrue(response.contains(String.format("name=%s", detectorName1))); - assertTrue(response.contains(String.format("id=%s", detectorId2))); - assertTrue(response.contains(String.format("name=%s", detectorName2))); - assertTrue(response.contains(String.format("id=%s", detectorId3))); - assertTrue(response.contains(String.format("name=%s", detectorName3))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", hits.length))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId1))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName1))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId2))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName2))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId3))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName3))); + assertTrue(response.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", hits.length))); } @Test @@ -347,11 +347,11 @@ public void 
testRunWithCombinedDetectorStatesTrue() throws Exception { ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); - assertTrue(response.contains(String.format("id=%s", detectorId1))); - assertTrue(response.contains(String.format("name=%s", detectorName1))); - assertTrue(response.contains(String.format("id=%s", detectorId3))); - assertTrue(response.contains(String.format("name=%s", detectorName3))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", 2))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId1))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName1))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId3))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName3))); + assertTrue(response.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 2))); } @Test @@ -414,9 +414,9 @@ public void testRunWithCombinedDetectorStatesMixed() throws Exception { ArgumentCaptor responseCaptor = ArgumentCaptor.forClass(String.class); verify(listener, times(1)).onResponse(responseCaptor.capture()); String response = responseCaptor.getValue(); - assertTrue(response.contains(String.format("id=%s", detectorId1))); - assertTrue(response.contains(String.format("name=%s", detectorName1))); - assertTrue(response.contains(String.format("TotalAnomalyDetectors=%d", 1))); + assertTrue(response.contains(String.format(Locale.ROOT, "id=%s", detectorId1))); + assertTrue(response.contains(String.format(Locale.ROOT, "name=%s", detectorName1))); + assertTrue(response.contains(String.format(Locale.ROOT, "TotalAnomalyDetectors=%d", 1))); } @Test diff --git a/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java index 0a9e192e..46234ea6 
100644 --- a/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchAnomalyResultsToolIT.java @@ -117,7 +117,10 @@ public void testSearchAnomalyResultsToolInFlowAgent_complexParams() { + "\"realTime\": true, \"anomalyGradeThreshold\": 0, \"sortOrder\": \"asc\"," + "\"sortString\": \"data_start_time\", \"size\": 10, \"startIndex\": 0 }}"; String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("TotalAnomalyResults=%d", 1))); + assertTrue( + String.format(Locale.ROOT, "total anomaly results is not 1, result: %s", result), + result.contains(String.format(Locale.ROOT, "TotalAnomalyResults=%d", 1)) + ); } @SneakyThrows diff --git a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java index cd3eca09..b0ee3503 100644 --- a/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java +++ b/src/test/java/org/opensearch/integTest/SearchMonitorsToolIT.java @@ -9,6 +9,7 @@ import java.nio.file.Files; import java.nio.file.Path; +import java.util.Locale; import org.junit.After; import org.junit.Before; @@ -65,7 +66,7 @@ public void testSearchMonitorsToolInFlowAgent_searchById() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorId\": \"" + monitorId + "\"}}"; String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("name=%s", monitorName))); + assertTrue(result.contains(String.format(Locale.ROOT, "name=%s", monitorName))); assertTrue(result.contains("TotalMonitors=1")); deleteMonitor(monitorId); } @@ -78,7 +79,7 @@ public void testSearchMonitorsToolInFlowAgent_singleMonitor_noFilter() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{}}"; String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("name=%s", monitorName))); + 
assertTrue(result.contains(String.format(Locale.ROOT, "name=%s", monitorName))); assertTrue(result.contains("TotalMonitors=1")); deleteMonitor(monitorId); } @@ -101,8 +102,8 @@ public void testSearchMonitorsToolInFlowAgent_multipleMonitors_noFilter() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{}}"; String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("name=%s", monitorName))); - assertTrue(result.contains(String.format("name=%s", monitorName2))); + assertTrue(result.contains(String.format(Locale.ROOT, "name=%s", monitorName))); + assertTrue(result.contains(String.format(Locale.ROOT, "name=%s", monitorName2))); assertTrue(result.contains("enabled=true")); assertTrue(result.contains("enabled=false")); assertTrue(result.contains("TotalMonitors=2")); @@ -119,8 +120,8 @@ public void testSearchMonitorsToolInFlowAgent_multipleMonitors_filter() { String agentId = createAgent(registerAgentRequestBody); String agentInput = "{\"parameters\":{\"monitorName\": \"" + monitorName + "\"}}"; String result = executeAgent(agentId, agentInput); - assertTrue(result.contains(String.format("name=%s", monitorName))); - assertFalse(result.contains(String.format("name=%s", monitorName2))); + assertTrue(result.contains(String.format(Locale.ROOT, "name=%s", monitorName))); + assertFalse(result.contains(String.format(Locale.ROOT, "name=%s", monitorName2))); assertTrue(result.contains("enabled=true")); assertTrue(result.contains("TotalMonitors=1")); deleteMonitor(monitorId1); From 4039bf7790dece12ba96037c4e8859a8e1275417 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Fri, 21 Jun 2024 17:31:56 +0800 Subject: [PATCH 097/119] change log level to error (#339) Signed-off-by: zane-neo --- src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java 
b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index d24afd04..658a3fc7 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -189,7 +189,7 @@ protected Map waitResponseMeetingCondition( if (condition.test(responseInMap)) { return responseInMap; } - logger.info("The " + i + "-th response: " + responseInMap.toString()); + logger.error(String.format(Locale.ROOT, "The %s-th response: %s", i, responseInMap.toString())); Thread.sleep(DEFAULT_TASK_RESULT_QUERY_INTERVAL_IN_MILLISECOND); } fail("The response failed to meet condition after " + MAX_TASK_RESULT_QUERY_TIME_IN_SECOND + " seconds."); From 08f41b81ff49597bdc14d76eac53d0ad76c1adf3 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Thu, 4 Jul 2024 14:31:20 +0800 Subject: [PATCH 098/119] fix compilation error and change gradle version to compatible with jdk21 (#345) * fix compilation error and change gradle version to compatible with jdk21 Signed-off-by: zane-neo * change gradle wrapper distribution checksum Signed-off-by: zane-neo --------- Signed-off-by: zane-neo --- gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../org/opensearch/agent/tools/SearchMonitorsToolTests.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 3999f7f3..b6114cae 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=3e1af3ae886920c3ac87f7a91f816c0c7c436f276a6eefdb3da152100fef72ae 
+distributionSha256Sum=9631d53cf3e74bfa726893aee1f8994fee4e060c401335946dba2156f440f24c diff --git a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java index 00bfd4d1..250ce5a2 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchMonitorsToolTests.java @@ -81,7 +81,7 @@ public void setup() { new CronSchedule("31 * * * *", ZoneId.of("Asia/Kolkata"), null), Instant.now(), Instant.now(), - Monitor.MonitorType.QUERY_LEVEL_MONITOR, + Monitor.MonitorType.QUERY_LEVEL_MONITOR.getValue(), new User("test-user", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()), 0, Collections.emptyList(), From 5340662cbcf58a8204d9b15c84b222503a7a5120 Mon Sep 17 00:00:00 2001 From: zhichao-aws Date: Tue, 16 Jul 2024 18:24:49 +0800 Subject: [PATCH 099/119] backport (#352) Signed-off-by: zhichao-aws Co-authored-by: zane-neo --- .github/workflows/ci.yml | 2 ++ .github/workflows/test_security.yml | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c5ba123a..8edd403c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,6 +36,8 @@ jobs: strategy: matrix: java: [11, 17, 21] + env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true name: Build and Test skills plugin on Linux runs-on: ubuntu-latest container: diff --git a/.github/workflows/test_security.yml b/.github/workflows/test_security.yml index 2509d678..43274ed5 100644 --- a/.github/workflows/test_security.yml +++ b/.github/workflows/test_security.yml @@ -17,7 +17,8 @@ jobs: strategy: matrix: java: [11, 17, 21] - + env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true name: Run Security Integration Tests on Linux runs-on: ubuntu-latest needs: Get-CI-Image-Tag From 858e1e303e0360e097c969a2d677839900233495 Mon Sep 17 00:00:00 2001 From: 
"opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 17 Jul 2024 10:50:35 +0800 Subject: [PATCH 100/119] [Feature] support nested query in neural sparse tool, vectorDB tool and RAG tool (#350) (#351) * support nested query in neural sparse * support nested in vector db tool * add test for RAG tool pass nested path * keep the 1st digit for score * lint --------- (cherry picked from commit 7a5d0d8f8a6f83aa35dacc9a64b3c70b5937bef0) Signed-off-by: zhichao-aws Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: zhichao-aws --- .../agent/tools/NeuralSparseSearchTool.java | 33 +++++++- .../opensearch/agent/tools/VectorDBTool.java | 34 +++++++- .../tools/NeuralSparseSearchToolTests.java | 22 +++++ .../opensearch/agent/tools/RAGToolTests.java | 10 ++- .../agent/tools/VectorDBToolTests.java | 22 +++++ .../integTest/NeuralSparseSearchToolIT.java | 62 +++++++++++++- .../opensearch/integTest/VectorDBToolIT.java | 80 +++++++++++++++++++ ...eural_sparse_search_tool_request_body.json | 3 +- ...w_agent_of_vectordb_tool_request_body.json | 3 +- 9 files changed, 259 insertions(+), 10 deletions(-) diff --git a/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java index cbe0d393..60168603 100644 --- a/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java +++ b/src/main/java/org/opensearch/agent/tools/NeuralSparseSearchTool.java @@ -33,10 +33,12 @@ public class NeuralSparseSearchTool extends AbstractRetrieverTool { public static final String TYPE = "NeuralSparseSearchTool"; public static final String MODEL_ID_FIELD = "model_id"; public static final String EMBEDDING_FIELD = "embedding_field"; + public static final String NESTED_PATH_FIELD = "nested_path"; private String name = TYPE; private String modelId; private String embeddingField; + private String nestedPath; @Builder public 
NeuralSparseSearchTool( @@ -46,11 +48,13 @@ public NeuralSparseSearchTool( String embeddingField, String[] sourceFields, Integer docSize, - String modelId + String modelId, + String nestedPath ) { super(client, xContentRegistry, index, sourceFields, docSize); this.modelId = modelId; this.embeddingField = embeddingField; + this.nestedPath = nestedPath; } @Override @@ -61,8 +65,29 @@ protected String getQueryBody(String queryText) { ); } - Map queryBody = Map - .of("query", Map.of("neural_sparse", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId)))); + Map queryBody; + if (StringUtils.isBlank(nestedPath)) { + queryBody = Map + .of("query", Map.of("neural_sparse", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId)))); + } else { + queryBody = Map + .of( + "query", + Map + .of( + "nested", + Map + .of( + "path", + nestedPath, + "score_mode", + "max", + "query", + Map.of("neural_sparse", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId))) + ) + ) + ); + } try { return AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(queryBody)); @@ -99,6 +124,7 @@ public NeuralSparseSearchTool create(Map params) { String[] sourceFields = gson.fromJson((String) params.get(SOURCE_FIELD), String[].class); String modelId = (String) params.get(MODEL_ID_FIELD); Integer docSize = params.containsKey(DOC_SIZE_FIELD) ? 
Integer.parseInt((String) params.get(DOC_SIZE_FIELD)) : DEFAULT_DOC_SIZE; + String nestedPath = (String) params.get(NESTED_PATH_FIELD); return NeuralSparseSearchTool .builder() .client(client) @@ -108,6 +134,7 @@ public NeuralSparseSearchTool create(Map params) { .sourceFields(sourceFields) .modelId(modelId) .docSize(docSize) + .nestedPath(nestedPath) .build(); } diff --git a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java index 4b5b41fa..d397060e 100644 --- a/src/main/java/org/opensearch/agent/tools/VectorDBTool.java +++ b/src/main/java/org/opensearch/agent/tools/VectorDBTool.java @@ -38,11 +38,13 @@ public class VectorDBTool extends AbstractRetrieverTool { public static final String EMBEDDING_FIELD = "embedding_field"; public static final String K_FIELD = "k"; public static final Integer DEFAULT_K = 10; + public static final String NESTED_PATH_FIELD = "nested_path"; private String name = TYPE; private String modelId; private String embeddingField; private Integer k; + private String nestedPath; @Builder public VectorDBTool( @@ -53,12 +55,14 @@ public VectorDBTool( String[] sourceFields, Integer docSize, String modelId, - Integer k + Integer k, + String nestedPath ) { super(client, xContentRegistry, index, sourceFields, docSize); this.modelId = modelId; this.embeddingField = embeddingField; this.k = k; + this.nestedPath = nestedPath; } @Override @@ -69,8 +73,30 @@ protected String getQueryBody(String queryText) { ); } - Map queryBody = Map - .of("query", Map.of("neural", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId, "k", k)))); + Map queryBody; + if (StringUtils.isBlank(nestedPath)) { + queryBody = Map + .of("query", Map.of("neural", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId, "k", k)))); + + } else { + queryBody = Map + .of( + "query", + Map + .of( + "nested", + Map + .of( + "path", + nestedPath, + "score_mode", + "max", + "query", 
+ Map.of("neural", Map.of(embeddingField, Map.of("query_text", queryText, "model_id", modelId, "k", k))) + ) + ) + ); + } try { return AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(queryBody)); @@ -108,6 +134,7 @@ public VectorDBTool create(Map params) { String modelId = (String) params.get(MODEL_ID_FIELD); Integer docSize = params.containsKey(DOC_SIZE_FIELD) ? Integer.parseInt((String) params.get(DOC_SIZE_FIELD)) : DEFAULT_DOC_SIZE; Integer k = params.containsKey(K_FIELD) ? Integer.parseInt((String) params.get(K_FIELD)) : DEFAULT_K; + String nestedPath = (String) params.get(NESTED_PATH_FIELD); return VectorDBTool .builder() .client(client) @@ -118,6 +145,7 @@ public VectorDBTool create(Map params) { .modelId(modelId) .docSize(docSize) .k(k) + .nestedPath(nestedPath) .build(); } diff --git a/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java b/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java index 4491db43..d6d14991 100644 --- a/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/NeuralSparseSearchToolTests.java @@ -23,6 +23,7 @@ public class NeuralSparseSearchToolTests { public static final String TEST_QUERY_TEXT = "123fsd23134sdfouh"; public static final String TEST_EMBEDDING_FIELD = "test embedding"; public static final String TEST_MODEL_ID = "123fsd23134"; + public static final String TEST_NESTED_PATH = "nested_path"; private Map params = new HashMap<>(); @Before @@ -60,6 +61,22 @@ public void testGetQueryBody() { assertEquals("123fsd23134", queryBody.get("query").get("neural_sparse").get("test embedding").get("model_id")); } + @Test + @SneakyThrows + public void testGetQueryBodyWithNestedPath() { + params.put(NeuralSparseSearchTool.NESTED_PATH_FIELD, TEST_NESTED_PATH); + NeuralSparseSearchTool tool = NeuralSparseSearchTool.Factory.getInstance().create(params); + Map>> nestedQueryBody = 
gson.fromJson(tool.getQueryBody(TEST_QUERY_TEXT), Map.class); + assertEquals("nested_path", nestedQueryBody.get("query").get("nested").get("path")); + assertEquals("max", nestedQueryBody.get("query").get("nested").get("score_mode")); + Map>> queryBody = (Map>>) nestedQueryBody + .get("query") + .get("nested") + .get("query"); + assertEquals("123fsd23134sdfouh", queryBody.get("neural_sparse").get("test embedding").get("query_text")); + assertEquals("123fsd23134", queryBody.get("neural_sparse").get("test embedding").get("model_id")); + } + @Test @SneakyThrows public void testGetQueryBodyWithJsonObjectString() { @@ -110,6 +127,11 @@ public void testCreateToolsParseParams() { () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.MODEL_ID_FIELD, 123)) ); + assertThrows( + ClassCastException.class, + () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.NESTED_PATH_FIELD, 123)) + ); + assertThrows( JsonSyntaxException.class, () -> NeuralSparseSearchTool.Factory.getInstance().create(Map.of(NeuralSparseSearchTool.SOURCE_FIELD, "123")) diff --git a/src/test/java/org/opensearch/agent/tools/RAGToolTests.java b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java index 4696c12c..0f19f91a 100644 --- a/src/test/java/org/opensearch/agent/tools/RAGToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/RAGToolTests.java @@ -55,6 +55,7 @@ public class RAGToolTests { public static final String TEST_INFERENCE_MODEL_ID = "1234"; public static final String TEST_NEURAL_QUERY_TYPE = "neural"; public static final String TEST_NEURAL_SPARSE_QUERY_TYPE = "neural_sparse"; + public static final String TEST_NESTED_PATH = "nested_path"; static public final NamedXContentRegistry TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY = getQueryNamedXContentRegistry(); private RAGTool ragTool; @@ -422,6 +423,7 @@ public void testFactoryNeuralQuery() { assertEquals(factoryMock.getDefaultVersion(), null); 
assertNotNull(RAGTool.Factory.getInstance()); + params.put(VectorDBTool.NESTED_PATH_FIELD, TEST_NESTED_PATH); RAGTool rAGtool1 = factoryMock.create(params); VectorDBTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); params.put(VectorDBTool.MODEL_ID_FIELD, TEST_EMBEDDING_MODEL_ID); @@ -436,6 +438,7 @@ public void testFactoryNeuralQuery() { assertEquals(rAGtool1.getQueryTool().getSourceFields(), rAGtool2.getQueryTool().getSourceFields()); assertEquals(rAGtool1.getXContentRegistry(), rAGtool2.getXContentRegistry()); assertEquals(rAGtool1.getQueryType(), rAGtool2.getQueryType()); + assertEquals(((VectorDBTool) rAGtool1.getQueryTool()).getNestedPath(), ((VectorDBTool) rAGtool2.getQueryTool()).getNestedPath()); } @Test @@ -450,6 +453,8 @@ public void testFactoryNeuralSparseQuery() { assertEquals(factoryMock.getDefaultType(), RAGTool.TYPE); assertEquals(factoryMock.getDefaultVersion(), null); + params.put(NeuralSparseSearchTool.NESTED_PATH_FIELD, TEST_NESTED_PATH); + params.put("query_type", "neural_sparse"); RAGTool rAGtool1 = factoryMock.create(params); NeuralSparseSearchTool.Factory.getInstance().init(client, TEST_XCONTENT_REGISTRY_FOR_NEURAL_QUERY); NeuralSparseSearchTool queryTool = NeuralSparseSearchTool.Factory.getInstance().create(params); @@ -463,7 +468,10 @@ public void testFactoryNeuralSparseQuery() { assertEquals(rAGtool1.getQueryTool().getSourceFields(), rAGtool2.getQueryTool().getSourceFields()); assertEquals(rAGtool1.getXContentRegistry(), rAGtool2.getXContentRegistry()); assertEquals(rAGtool1.getQueryType(), rAGtool2.getQueryType()); - + assertEquals( + ((NeuralSparseSearchTool) rAGtool1.getQueryTool()).getNestedPath(), + ((NeuralSparseSearchTool) rAGtool2.getQueryTool()).getNestedPath() + ); } private static NamedXContentRegistry getQueryNamedXContentRegistry() { diff --git a/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java index 
849f9254..635724a7 100644 --- a/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/VectorDBToolTests.java @@ -24,6 +24,7 @@ public class VectorDBToolTests { public static final String TEST_EMBEDDING_FIELD = "test embedding"; public static final String TEST_MODEL_ID = "123fsd23134"; public static final Integer TEST_K = 123; + public static final String TEST_NESTED_PATH = "nested_path"; private Map params = new HashMap<>(); @Before @@ -61,6 +62,22 @@ public void testGetQueryBody() { assertEquals(123.0, queryBody.get("query").get("neural").get("test embedding").get("k")); } + @Test + @SneakyThrows + public void testGetQueryBodyWithNestedPath() { + params.put(VectorDBTool.NESTED_PATH_FIELD, TEST_NESTED_PATH); + VectorDBTool tool = VectorDBTool.Factory.getInstance().create(params); + Map>> nestedQueryBody = gson.fromJson(tool.getQueryBody(TEST_QUERY_TEXT), Map.class); + assertEquals("nested_path", nestedQueryBody.get("query").get("nested").get("path")); + assertEquals("max", nestedQueryBody.get("query").get("nested").get("score_mode")); + Map>> queryBody = (Map>>) nestedQueryBody + .get("query") + .get("nested") + .get("query"); + assertEquals("123fsd23134sdfouh", queryBody.get("neural").get("test embedding").get("query_text")); + assertEquals("123fsd23134", queryBody.get("neural").get("test embedding").get("model_id")); + } + @Test @SneakyThrows public void testGetQueryBodyWithJsonObjectString() { @@ -103,6 +120,11 @@ public void testCreateToolsParseParams() { assertThrows(ClassCastException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.MODEL_ID_FIELD, 123))); + assertThrows( + ClassCastException.class, + () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.NESTED_PATH_FIELD, 123)) + ); + assertThrows(JsonSyntaxException.class, () -> VectorDBTool.Factory.getInstance().create(Map.of(VectorDBTool.SOURCE_FIELD, "123"))); // although it will be parsed as integer, but the 
parameters value should always be String diff --git a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java index 3758c84d..b7618468 100644 --- a/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java +++ b/src/test/java/org/opensearch/integTest/NeuralSparseSearchToolIT.java @@ -7,7 +7,6 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; -import static org.opensearch.ml.common.utils.StringUtils.gson; import java.nio.file.Files; import java.nio.file.Path; @@ -22,6 +21,7 @@ public class NeuralSparseSearchToolIT extends BaseAgentToolsIT { public static String TEST_INDEX_NAME = "test_index"; + public static String TEST_NESTED_INDEX_NAME = "test_index_nested"; private String modelId; private String registerAgentRequestBody; @@ -64,12 +64,55 @@ private void prepareIndex() { addDocToIndex(TEST_INDEX_NAME, "2", List.of("text", "embedding"), List.of("text doc 3", Map.of("test", 5, "a", 6))); } + @SneakyThrows + private void prepareNestedIndex() { + createIndexWithConfiguration( + TEST_NESTED_INDEX_NAME, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"text\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"embedding\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\":{\n" + + " \"sparse\":{\n" + + " \"type\":\"rank_features\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex( + TEST_NESTED_INDEX_NAME, + "0", + List.of("text", "embedding"), + List.of("text doc 1", Map.of("sparse", List.of(Map.of("hello", 1, "world", 2)))) + ); + addDocToIndex( + TEST_NESTED_INDEX_NAME, + "1", + List.of("text", "embedding"), + List.of("text doc 2", Map.of("sparse", List.of(Map.of("a", 3, "b", 4)))) + ); + addDocToIndex( + TEST_NESTED_INDEX_NAME, + "2", + List.of("text", "embedding"), + List.of("text doc 3", Map.of("sparse", List.of(Map.of("test", 5, "a", 6)))) + ); + } + @Before 
@SneakyThrows public void setUp() { super.setUp(); prepareModel(); prepareIndex(); + prepareNestedIndex(); registerAgentRequestBody = Files .readString( Path @@ -127,6 +170,23 @@ public void testNeuralSparseSearchToolInFlowAgent() { ); } + public void testNeuralSparseSearchToolInFlowAgent_withNestedIndex() { + String registerAgentRequestBodyNested = registerAgentRequestBody; + registerAgentRequestBodyNested = registerAgentRequestBodyNested.replace("\"nested_path\": \"\"", "\"nested_path\": \"embedding\""); + registerAgentRequestBodyNested = registerAgentRequestBodyNested + .replace("\"embedding_field\": \"embedding\"", "\"embedding_field\": \"embedding.sparse\""); + registerAgentRequestBodyNested = registerAgentRequestBodyNested + .replace("\"index\": \"test_index\"", "\"index\": \"test_index_nested\""); + String agentId = createAgent(registerAgentRequestBodyNested); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + assertEquals( + "The agent execute response not equal with expected.", + "{\"_index\":\"test_index_nested\",\"_source\":{\"text\":\"text doc 3\"},\"_id\":\"2\",\"_score\":2.4136734}\n" + + "{\"_index\":\"test_index_nested\",\"_source\":{\"text\":\"text doc 2\"},\"_id\":\"1\",\"_score\":1.2068367}\n", + result + ); + } + public void testNeuralSparseSearchToolInFlowAgent_withIllegalSourceField_thenGetEmptySource() { String agentId = createAgent(registerAgentRequestBody.replace("text", "text2")); String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); diff --git a/src/test/java/org/opensearch/integTest/VectorDBToolIT.java b/src/test/java/org/opensearch/integTest/VectorDBToolIT.java index c494371a..3f7fc77e 100644 --- a/src/test/java/org/opensearch/integTest/VectorDBToolIT.java +++ b/src/test/java/org/opensearch/integTest/VectorDBToolIT.java @@ -22,6 +22,7 @@ public class VectorDBToolIT extends BaseAgentToolsIT { public static String TEST_INDEX_NAME = "test_index"; + public static String 
TEST_NESTED_INDEX_NAME = "test_index_nested"; private String modelId; private String registerAgentRequestBody; @@ -99,12 +100,75 @@ private void prepareIndex() { addDocToIndex(TEST_INDEX_NAME, "1", List.of("text"), List.of("a b")); } + @SneakyThrows + private void prepareNestedIndex() { + String pipelineConfig = "{\n" + + " \"description\": \"text embedding pipeline\",\n" + + " \"processors\": [\n" + + " {\n" + + " \"text_embedding\": {\n" + + " \"model_id\": \"" + + modelId + + "\",\n" + + " \"field_map\": {\n" + + " \"text\": \"embedding\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + "}"; + createIngestPipelineWithConfiguration("test-embedding-model", pipelineConfig); + + String indexMapping = "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"text\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"embedding\": {\n" + + " \"type\":\"nested\",\n" + + " \"properties\":{\n" + + " \"knn\":{\n" + + " \"type\": \"knn_vector\",\n" + + " \"dimension\": 768,\n" + + " \"method\": {\n" + + " \"name\": \"hnsw\",\n" + + " \"space_type\": \"l2\",\n" + + " \"engine\": \"lucene\",\n" + + " \"parameters\": {\n" + + " \"ef_construction\": 128,\n" + + " \"m\": 24\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " \n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"knn.space_type\": \"cosinesimil\",\n" + + " \"default_pipeline\": \"test-embedding-model\",\n" + + " \"knn\": \"true\"\n" + + " }\n" + + " }\n" + + "}"; + + createIndexWithConfiguration(TEST_NESTED_INDEX_NAME, indexMapping); + + addDocToIndex(TEST_NESTED_INDEX_NAME, "0", List.of("text"), List.of(List.of("hello world"))); + + addDocToIndex(TEST_NESTED_INDEX_NAME, "1", List.of("text"), List.of(List.of("a b"))); + } + @Before @SneakyThrows public void setUp() { super.setUp(); prepareModel(); prepareIndex(); + prepareNestedIndex(); registerAgentRequestBody = Files .readString( Path @@ -157,6 +221,22 @@ public void testVectorDBToolInFlowAgent() { ); } + 
public void testVectorDBToolInFlowAgent_withNestedIndex() { + String registerAgentRequestBodyNested = registerAgentRequestBody; + registerAgentRequestBodyNested = registerAgentRequestBodyNested.replace("\"nested_path\": \"\"", "\"nested_path\": \"embedding\""); + registerAgentRequestBodyNested = registerAgentRequestBodyNested + .replace("\"embedding_field\": \"embedding\"", "\"embedding_field\": \"embedding.knn\""); + registerAgentRequestBodyNested = registerAgentRequestBodyNested + .replace("\"index\": \"test_index\"", "\"index\": \"test_index_nested\""); + String agentId = createAgent(registerAgentRequestBodyNested); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); + // To allow digits variation from model output, using string contains to match + assertTrue( + result.contains("{\"_index\":\"test_index_nested\",\"_source\":{\"text\":[\"hello world\"]},\"_id\":\"0\",\"_score\":0.7") + ); + assertTrue(result.contains("{\"_index\":\"test_index_nested\",\"_source\":{\"text\":[\"a b\"]},\"_id\":\"1\",\"_score\":0.2")); + } + public void testVectorDBToolInFlowAgent_withIllegalSourceField_thenGetEmptySource() { String agentId = createAgent(registerAgentRequestBody.replace("text", "text2")); String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"a\"}}"); diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json index ac2a2987..579f0778 100644 --- a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_neural_sparse_search_tool_request_body.json @@ -10,7 +10,8 @@ "index": "test_index", "embedding_field": "embedding", "source_field": ["text"], - "input": "${parameters.question}" + "input": 
"${parameters.question}", + "nested_path": "" } } ] diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json index 3b13e443..b1488388 100644 --- a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_vectordb_tool_request_body.json @@ -10,7 +10,8 @@ "index": "test_index", "embedding_field": "embedding", "source_field": ["text"], - "input": "${parameters.question}" + "input": "${parameters.question}", + "nested_path": "" } } ] From 32382abd04d8de0ef76587bcb206fdd56e4f2145 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Wed, 17 Jul 2024 11:05:28 +0800 Subject: [PATCH 101/119] Add cluster setting to control ppl execution (#344) (#356) * Add cluster setting to control ppl execution * format code * format code * Add debug log to indicate the ppl execution settings * format code --------- (cherry picked from commit 14d9ef25a8f63b93d7fcbe5e6a725941b342ae4a) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../java/org/opensearch/agent/ToolPlugin.java | 14 ++- .../agent/common/SkillSettings.java | 22 ++++ .../org/opensearch/agent/tools/PPLTool.java | 105 +++++++++--------- .../tools/utils/ClusterSettingHelper.java | 35 ++++++ .../opensearch/agent/tools/PPLToolTests.java | 34 +++++- .../integTest/BaseAgentToolsIT.java | 1 + .../org/opensearch/integTest/PPLToolIT.java | 23 ++-- 7 files changed, 172 insertions(+), 62 deletions(-) create mode 100644 src/main/java/org/opensearch/agent/common/SkillSettings.java create mode 100644 src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java 
b/src/main/java/org/opensearch/agent/ToolPlugin.java index db07ac0b..d5c418ae 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -10,6 +10,7 @@ import java.util.List; import java.util.function.Supplier; +import org.opensearch.agent.common.SkillSettings; import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; import org.opensearch.agent.tools.RAGTool; @@ -18,9 +19,12 @@ import org.opensearch.agent.tools.SearchAnomalyResultsTool; import org.opensearch.agent.tools.SearchMonitorsTool; import org.opensearch.agent.tools.VectorDBTool; +import org.opensearch.agent.tools.utils.ClusterSettingHelper; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; @@ -59,8 +63,9 @@ public Collection createComponents( this.client = client; this.clusterService = clusterService; this.xContentRegistry = xContentRegistry; - - PPLTool.Factory.getInstance().init(client); + Settings settings = environment.settings(); + ClusterSettingHelper clusterSettingHelper = new ClusterSettingHelper(settings, clusterService); + PPLTool.Factory.getInstance().init(client, clusterSettingHelper); NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); VectorDBTool.Factory.getInstance().init(client, xContentRegistry); RAGTool.Factory.getInstance().init(client, xContentRegistry); @@ -85,4 +90,9 @@ public List> getToolFactories() { SearchMonitorsTool.Factory.getInstance() ); } + + @Override + public List> getSettings() { + return List.of(SkillSettings.PPL_EXECUTION_ENABLED); + } } diff --git 
a/src/main/java/org/opensearch/agent/common/SkillSettings.java b/src/main/java/org/opensearch/agent/common/SkillSettings.java new file mode 100644 index 00000000..55808748 --- /dev/null +++ b/src/main/java/org/opensearch/agent/common/SkillSettings.java @@ -0,0 +1,22 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.common; + +import org.opensearch.common.settings.Setting; + +/** + * Settings for skills plugin + */ +public final class SkillSettings { + + private SkillSettings() {} + + /** + * This setting controls whether PPL execution is enabled or not + */ + public static final Setting PPL_EXECUTION_ENABLED = Setting + .boolSetting("plugins.skills.ppl_execution_enabled", false, Setting.Property.NodeScope, Setting.Property.Dynamic); +} diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 25201fe1..3f8c728b 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -30,9 +30,9 @@ import org.json.JSONObject; import org.opensearch.action.ActionRequest; import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; +import org.opensearch.agent.common.SkillSettings; +import org.opensearch.agent.tools.utils.ClusterSettingHelper; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.core.action.ActionListener; @@ -46,7 +46,6 @@ import org.opensearch.ml.common.output.model.ModelTensors; import org.opensearch.ml.common.spi.tools.Tool; import org.opensearch.ml.common.spi.tools.ToolAnnotation; -import org.opensearch.ml.common.transport.MLTaskResponse; import 
org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; import org.opensearch.ml.common.transport.prediction.MLPredictionTaskRequest; import org.opensearch.ml.repackage.com.google.common.collect.ImmutableMap; @@ -98,6 +97,8 @@ public class PPLTool implements Tool { private int head; + private ClusterSettingHelper clusterSettingHelper; + private static Gson gson = new Gson(); private static Map DEFAULT_PROMPT_DICT; @@ -127,12 +128,7 @@ public class PPLTool implements Tool { ALLOWED_FIELDS_TYPE.add("nested"); ALLOWED_FIELDS_TYPE.add("geo_point"); - try { - DEFAULT_PROMPT_DICT = loadDefaultPromptDict(); - } catch (IOException e) { - log.error("fail to load default prompt dict" + e.getMessage()); - DEFAULT_PROMPT_DICT = new HashMap<>(); - } + DEFAULT_PROMPT_DICT = loadDefaultPromptDict(); } public enum PPLModelType { @@ -156,6 +152,7 @@ public static PPLModelType from(String value) { public PPLTool( Client client, + ClusterSettingHelper clusterSettingHelper, String modelId, String contextPrompt, String pplModelType, @@ -167,18 +164,20 @@ public PPLTool( this.modelId = modelId; this.pplModelType = PPLModelType.from(pplModelType); if (contextPrompt.isEmpty()) { - this.contextPrompt = this.DEFAULT_PROMPT_DICT.getOrDefault(this.pplModelType.toString(), ""); + this.contextPrompt = DEFAULT_PROMPT_DICT.getOrDefault(this.pplModelType.toString(), ""); } else { this.contextPrompt = contextPrompt; } this.previousToolKey = previousToolKey; this.head = head; this.execute = execute; + this.clusterSettingHelper = clusterSettingHelper; } + @SuppressWarnings("unchecked") @Override public void run(Map parameters, ActionListener listener) { - parameters = extractFromChatParameters(parameters); + extractFromChatParameters(parameters); String indexName = getIndexNameFromParameters(parameters); if (StringUtils.isBlank(indexName)) { throw new IllegalArgumentException( @@ -197,14 +196,14 @@ public void run(Map parameters, ActionListener listener) } GetMappingsRequest 
getMappingsRequest = buildGetMappingRequest(indexName); - client.admin().indices().getMappings(getMappingsRequest, ActionListener.wrap(getMappingsResponse -> { + client.admin().indices().getMappings(getMappingsRequest, ActionListener.wrap(getMappingsResponse -> { Map mappings = getMappingsResponse.getMappings(); - if (mappings.size() == 0) { + if (mappings.isEmpty()) { throw new IllegalArgumentException("No matching mapping with index name: " + indexName); } String firstIndexName = (String) mappings.keySet().toArray()[0]; SearchRequest searchRequest = buildSearchRequest(firstIndexName); - client.search(searchRequest, ActionListener.wrap(searchResponse -> { + client.search(searchRequest, ActionListener.wrap(searchResponse -> { SearchHit[] searchHits = searchResponse.getHits().getHits(); String tableInfo = constructTableInfo(searchHits, mappings); String prompt = constructPrompt(tableInfo, question.strip(), indexName); @@ -216,13 +215,20 @@ public void run(Map parameters, ActionListener listener) modelId, MLInput.builder().algorithm(FunctionName.REMOTE).inputDataset(inputDataSet).build() ); - client.execute(MLPredictionTaskAction.INSTANCE, request, ActionListener.wrap(mlTaskResponse -> { + client.execute(MLPredictionTaskAction.INSTANCE, request, ActionListener.wrap(mlTaskResponse -> { ModelTensorOutput modelTensorOutput = (ModelTensorOutput) mlTaskResponse.getOutput(); ModelTensors modelTensors = modelTensorOutput.getMlModelOutputs().get(0); ModelTensor modelTensor = modelTensors.getMlModelTensors().get(0); Map dataAsMap = (Map) modelTensor.getDataAsMap(); String ppl = parseOutput(dataAsMap.get("response"), indexName); - if (!this.execute) { + boolean pplExecutedEnabled = clusterSettingHelper.getClusterSettings(SkillSettings.PPL_EXECUTION_ENABLED); + if (!pplExecutedEnabled || !this.execute) { + if (!pplExecutedEnabled) { + log + .debug( + "PPL execution is disabled, the query will be returned directly, to enable this, please set plugins.skills.ppl_execution_enabled 
to true" + ); + } Map ret = ImmutableMap.of("ppl", ppl); listener.onResponse((T) AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(ret))); return; @@ -234,7 +240,7 @@ public void run(Map parameters, ActionListener listener) .execute( PPLQueryAction.INSTANCE, transportPPLQueryRequest, - getPPLTransportActionListener(ActionListener.wrap(transportPPLQueryResponse -> { + getPPLTransportActionListener(ActionListener.wrap(transportPPLQueryResponse -> { String results = transportPPLQueryResponse.getResult(); Map returnResults = ImmutableMap.of("ppl", ppl, "executionResult", results); listener @@ -250,17 +256,15 @@ public void run(Map parameters, ActionListener listener) ); // Execute output here }, e -> { - log.info("fail to predict model: " + e); + log.error(String.format(Locale.ROOT, "fail to predict model: %s with error: %s", modelId, e.getMessage()), e); listener.onFailure(e); })); }, e -> { - log.info("fail to search: " + e); + log.error(String.format(Locale.ROOT, "fail to search model: %s with error: %s", modelId, e.getMessage()), e); listener.onFailure(e); - } - - )); + })); }, e -> { - log.info("fail to get mapping: " + e); + log.error(String.format(Locale.ROOT, "fail to get mapping of index: %s with error: %s", indexName, e.getMessage()), e); String errorMessage = e.getMessage(); if (errorMessage.contains("no such index")) { listener @@ -287,15 +291,14 @@ public String getName() { @Override public boolean validate(Map parameters) { - if (parameters == null || parameters.size() == 0) { - return false; - } - return true; + return parameters != null && !parameters.isEmpty(); } public static class Factory implements Tool.Factory { private Client client; + private ClusterSettingHelper clusterSettingHelper; + private static Factory INSTANCE; public static Factory getInstance() { @@ -311,8 +314,9 @@ public static Factory getInstance() { } } - public void init(Client client) { + public void init(Client client, ClusterSettingHelper 
clusterSettingHelper) { this.client = client; + this.clusterSettingHelper = clusterSettingHelper; } @Override @@ -320,12 +324,13 @@ public PPLTool create(Map map) { validatePPLToolParameters(map); return new PPLTool( client, + clusterSettingHelper, (String) map.get("model_id"), (String) map.getOrDefault("prompt", ""), (String) map.getOrDefault("model_type", ""), (String) map.getOrDefault("previous_tool_name", ""), - Integer.valueOf((String) map.getOrDefault("head", "-1")), - Boolean.valueOf((String) map.getOrDefault("execute", "true")) + NumberUtils.toInt((String) map.get("head"), -1), + Boolean.parseBoolean((String) map.getOrDefault("execute", "true")) ); } @@ -350,8 +355,7 @@ private SearchRequest buildSearchRequest(String indexName) { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(1).query(new MatchAllQueryBuilder()); // client; - SearchRequest request = new SearchRequest(new String[] { indexName }, searchSourceBuilder); - return request; + return new SearchRequest(new String[] { indexName }, searchSourceBuilder); } private GetMappingsRequest buildGetMappingRequest(String indexName) { @@ -421,19 +425,17 @@ private String constructTableInfo(SearchHit[] searchHits, Map indexInfo = ImmutableMap.of("mappingInfo", tableInfo, "question", question, "indexName", indexName); StringSubstitutor substitutor = new StringSubstitutor(indexInfo, "${indexInfo.", "}"); - String finalPrompt = substitutor.replace(contextPrompt); - return finalPrompt; + return substitutor.replace(contextPrompt); } private void extractNamesTypes(Map mappingSource, Map fieldsToType, String prefix) { - if (prefix.length() > 0) { + if (!prefix.isEmpty()) { prefix += "."; } @@ -456,7 +458,7 @@ private void extractNamesTypes(Map mappingSource, Map sampleSource, Map fieldsToSample, String prefix) throws PrivilegedActionException { - if (prefix.length() > 0) { + if (!prefix.isEmpty()) { prefix += "."; } @@ -479,16 +481,17 @@ private ActionListener 
getPPLTransportActionListen return ActionListener.wrap(r -> { listener.onResponse(TransportPPLQueryResponse.fromActionResponse(r)); }, listener::onFailure); } - private Map extractFromChatParameters(Map parameters) { + @SuppressWarnings("unchecked") + private void extractFromChatParameters(Map parameters) { if (parameters.containsKey("input")) { + String input = parameters.get("input"); try { - Map chatParameters = gson.fromJson(parameters.get("input"), Map.class); + Map chatParameters = gson.fromJson(input, Map.class); parameters.putAll(chatParameters); - } finally { - return parameters; + } catch (Exception e) { + log.error(String.format(Locale.ROOT, "Failed to parse chat parameters, input is: %s, which is not a valid json", input), e); } } - return parameters; } private String parseOutput(String llmOutput, String indexName) { @@ -552,14 +555,16 @@ private String getIndexNameFromParameters(Map parameters) { return indexName.trim(); } - private static Map loadDefaultPromptDict() throws IOException { - InputStream searchResponseIns = PPLTool.class.getResourceAsStream("PPLDefaultPrompt.json"); - if (searchResponseIns != null) { - String defaultPromptContent = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); - Map defaultPromptDict = gson.fromJson(defaultPromptContent, Map.class); - return defaultPromptDict; + @SuppressWarnings("unchecked") + private static Map loadDefaultPromptDict() { + try (InputStream searchResponseIns = PPLTool.class.getResourceAsStream("PPLDefaultPrompt.json")) { + if (searchResponseIns != null) { + String defaultPromptContent = new String(searchResponseIns.readAllBytes(), StandardCharsets.UTF_8); + return gson.fromJson(defaultPromptContent, Map.class); + } + } catch (IOException e) { + log.error("Failed to load default prompt dict", e); } return new HashMap<>(); } - } diff --git a/src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java 
b/src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java new file mode 100644 index 00000000..92bf9dcd --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java @@ -0,0 +1,35 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools.utils; + +import java.util.Optional; + +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; + +import lombok.AllArgsConstructor; + +/** + * This class is to encapsulate the {@link Settings} and {@link ClusterService} and provide a general method to retrieve dynamical cluster settings conveniently. + */ +@AllArgsConstructor +public class ClusterSettingHelper { + + private Settings settings; + + private ClusterService clusterService; + + /** + * Retrieves the cluster settings for the specified setting. + * + * @param setting the setting to retrieve cluster settings for + * @return the cluster setting value, or the default setting value if not found + */ + public T getClusterSettings(Setting setting) { + return Optional.ofNullable(clusterService.getClusterSettings().get(setting)).orElse(setting.get(settings)); + } +} diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index 25fe62a9..8e2c3aaa 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -9,6 +9,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.opensearch.ml.common.CommonValue.ML_CONNECTOR_INDEX; import static org.opensearch.ml.common.utils.StringUtils.gson; @@ -16,6 +17,7 @@ import java.util.Collections; 
import java.util.HashMap; import java.util.Map; +import java.util.Set; import org.apache.lucene.search.TotalHits; import org.junit.Before; @@ -24,10 +26,15 @@ import org.mockito.MockitoAnnotations; import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.search.SearchResponse; +import org.opensearch.agent.common.SkillSettings; +import org.opensearch.agent.tools.utils.ClusterSettingHelper; import org.opensearch.client.AdminClient; import org.opensearch.client.Client; import org.opensearch.client.IndicesAdminClient; import org.opensearch.cluster.metadata.MappingMetadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; @@ -122,7 +129,12 @@ public void setup() { return null; }).when(client).execute(eq(PPLQueryAction.INSTANCE), any(), any()); - PPLTool.Factory.getInstance().init(client); + Settings settings = Settings.builder().put(SkillSettings.PPL_EXECUTION_ENABLED.getKey(), true).build(); + ClusterService clusterService = mock(ClusterService.class); + when(clusterService.getSettings()).thenReturn(settings); + when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings, Set.of(SkillSettings.PPL_EXECUTION_ENABLED))); + ClusterSettingHelper clusterSettingHelper = new ClusterSettingHelper(settings, clusterService); + PPLTool.Factory.getInstance().init(client, clusterSettingHelper); } @Test @@ -401,6 +413,26 @@ public void testTool_executePPLFailure() { ); } + @Test + public void test_pplTool_whenPPLExecutionDisabled_returnOnlyContainsPPL() { + Settings settings = Settings.builder().put(SkillSettings.PPL_EXECUTION_ENABLED.getKey(), false).build(); + ClusterService clusterService = mock(ClusterService.class); + 
when(clusterService.getSettings()).thenReturn(settings); + when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings, Set.of(SkillSettings.PPL_EXECUTION_ENABLED))); + ClusterSettingHelper clusterSettingHelper = new ClusterSettingHelper(settings, clusterService); + PPLTool.Factory.getInstance().init(client, clusterSettingHelper); + PPLTool tool = PPLTool.Factory + .getInstance() + .create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "head", "100")); + assertEquals(PPLTool.TYPE, tool.getName()); + + tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { + Map returnResults = gson.fromJson(executePPLResult, Map.class); + assertNull(returnResults.get("executionResult")); + assertEquals("source=demo| head 1", returnResults.get("ppl")); + }, log::error)); + } + private void createMappings() { indexMappings = new HashMap<>(); indexMappings diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index 658a3fc7..853a2974 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -63,6 +63,7 @@ public void updateClusterSettings() { updateClusterSettings("plugins.ml_commons.jvm_heap_memory_threshold", 100); updateClusterSettings("plugins.ml_commons.allow_registering_model_via_url", true); updateClusterSettings("plugins.ml_commons.agent_framework_enabled", true); + updateClusterSettings("plugins.skills.ppl_execution_enabled", true); } @SneakyThrows diff --git a/src/test/java/org/opensearch/integTest/PPLToolIT.java b/src/test/java/org/opensearch/integTest/PPLToolIT.java index d25c6a95..b208e1f2 100644 --- a/src/test/java/org/opensearch/integTest/PPLToolIT.java +++ b/src/test/java/org/opensearch/integTest/PPLToolIT.java @@ -22,8 +22,6 @@ @Log4j2 public class PPLToolIT extends ToolIntegrationTest { - private String 
TEST_INDEX_NAME = "employee"; - @Override List promptHandlers() { PromptHandler PPLHandler = new PromptHandler() { @@ -60,6 +58,14 @@ public void testPPLTool() { ); } + public void test_PPLTool_whenPPLExecutionDisabled_ResultOnlyContainsPPL() { + updateClusterSettings("plugins.skills.ppl_execution_enabled", false); + prepareIndex(); + String agentId = registerAgent(); + String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"correct\", \"index\": \"employee\"}}"); + assertEquals("{\"ppl\":\"source\\u003demployee| where age \\u003e 56 | stats COUNT() as cnt\"}", result); + } + public void testPPLTool_withWrongPPLGenerated_thenThrowException() { prepareIndex(); String agentId = registerAgent(); @@ -148,8 +154,7 @@ private String registerAgent() { ) ); registerAgentRequestBody = registerAgentRequestBody.replace("", modelId); - String agentId = createAgent(registerAgentRequestBody); - return agentId; + return createAgent(registerAgentRequestBody); } @SneakyThrows @@ -166,14 +171,14 @@ private String registerAgentWithWrongModelId() { ) ); registerAgentRequestBody = registerAgentRequestBody.replace("", "wrong_model_id"); - String agentId = createAgent(registerAgentRequestBody); - return agentId; + return createAgent(registerAgentRequestBody); } @SneakyThrows private void prepareIndex() { + String testIndexName = "employee"; createIndexWithConfiguration( - TEST_INDEX_NAME, + testIndexName, "{\n" + " \"mappings\": {\n" + " \"properties\": {\n" @@ -187,8 +192,8 @@ private void prepareIndex() { + " }\n" + "}" ); - addDocToIndex(TEST_INDEX_NAME, "0", List.of("age", "name"), List.of(56, "john")); - addDocToIndex(TEST_INDEX_NAME, "1", List.of("age", "name"), List.of(56, "smith")); + addDocToIndex(testIndexName, "0", List.of("age", "name"), List.of(56, "john")); + addDocToIndex(testIndexName, "1", List.of("age", "name"), List.of(56, "smith")); } } From d5af2e1a949b6252cb3eb96e1fa4da382ee3bb0b Mon Sep 17 00:00:00 2001 From: gaobinlong Date: Thu, 18 Jul 2024 
10:52:29 +0800 Subject: [PATCH 102/119] [Backport 2.x] Add CreateAnomalyDetectorTool (#355) * Add CreateAnomalyDetectorTool (#348) * Add CreateAnomalyDetectorTool Signed-off-by: gaobinlong * Optimize some code Signed-off-by: gaobinlong * Fix test failure Signed-off-by: gaobinlong * Optimize exception Signed-off-by: gaobinlong --------- Signed-off-by: gaobinlong (cherry picked from commit 63be17914b76a686978e9147eb3995f4968ef9d8) * Fix test failure Signed-off-by: gaobinlong * Fix warning Signed-off-by: gaobinlong * Fix comment format Signed-off-by: gaobinlong --------- Signed-off-by: gaobinlong --- .../java/org/opensearch/agent/ToolPlugin.java | 5 +- .../tools/CreateAnomalyDetectorTool.java | 453 ++++++++++++++++++ .../org/opensearch/agent/tools/PPLTool.java | 25 +- .../agent/tools/utils/ToolHelper.java | 42 ++ .../CreateAnomalyDetectorDefaultPrompt.json | 4 + .../tools/CreateAnomalyDetectorToolTests.java | 280 +++++++++++ .../CreateAnomalyDetectorToolIT.java | 345 +++++++++++++ ...te_anomaly_detector_tool_request_body.json | 12 + 8 files changed, 1142 insertions(+), 24 deletions(-) create mode 100644 src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java create mode 100644 src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java create mode 100644 src/main/resources/org/opensearch/agent/tools/CreateAnomalyDetectorDefaultPrompt.json create mode 100644 src/test/java/org/opensearch/agent/tools/CreateAnomalyDetectorToolTests.java create mode 100644 src/test/java/org/opensearch/integTest/CreateAnomalyDetectorToolIT.java create mode 100644 src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_create_anomaly_detector_tool_request_body.json diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index d5c418ae..74ff6bf4 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -11,6 +11,7 @@ import 
java.util.function.Supplier; import org.opensearch.agent.common.SkillSettings; +import org.opensearch.agent.tools.CreateAnomalyDetectorTool; import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; import org.opensearch.agent.tools.RAGTool; @@ -73,6 +74,7 @@ public Collection createComponents( SearchAnomalyDetectorsTool.Factory.getInstance().init(client, namedWriteableRegistry); SearchAnomalyResultsTool.Factory.getInstance().init(client, namedWriteableRegistry); SearchMonitorsTool.Factory.getInstance().init(client); + CreateAnomalyDetectorTool.Factory.getInstance().init(client); return Collections.emptyList(); } @@ -87,7 +89,8 @@ public List> getToolFactories() { SearchAlertsTool.Factory.getInstance(), SearchAnomalyDetectorsTool.Factory.getInstance(), SearchAnomalyResultsTool.Factory.getInstance(), - SearchMonitorsTool.Factory.getInstance() + SearchMonitorsTool.Factory.getInstance(), + CreateAnomalyDetectorTool.Factory.getInstance() ); } diff --git a/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java b/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java new file mode 100644 index 00000000..9014c907 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java @@ -0,0 +1,453 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.security.AccessController; +import java.security.PrivilegedExceptionAction; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.StringJoiner; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import 
java.util.stream.Collectors; + +import org.apache.commons.text.StringSubstitutor; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.opensearch.agent.tools.utils.ToolHelper; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.MappingMetadata; +import org.opensearch.core.action.ActionListener; +import org.opensearch.ml.common.FunctionName; +import org.opensearch.ml.common.dataset.remote.RemoteInferenceInputDataSet; +import org.opensearch.ml.common.input.MLInput; +import org.opensearch.ml.common.output.model.ModelTensor; +import org.opensearch.ml.common.output.model.ModelTensorOutput; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.spi.tools.Tool; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; +import org.opensearch.ml.common.transport.prediction.MLPredictionTaskRequest; + +import com.google.common.collect.ImmutableMap; + +import joptsimple.internal.Strings; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * A tool used to help creating anomaly detector, the only one input parameter is the index name, this tool will get the mappings of the index + * in flight and let LLM give the suggested category field, aggregation field and correspond aggregation method which are required for the create + * anomaly detector API, the output of this tool is like: + *{ + * "index": "opensearch_dashboards_sample_data_ecommerce", + * "categoryField": "geoip.country_iso_code", + * "aggregationField": "total_quantity,total_unique_products,taxful_total_price", + * "aggregationMethod": "sum,count,sum", + * "dateFields": "customer_birth_date,order_date,products.created_on" + * } + */ +@Log4j2 +@Setter +@Getter +@ToolAnnotation(CreateAnomalyDetectorTool.TYPE) +public class CreateAnomalyDetectorTool implements 
Tool { + // the type of this tool + public static final String TYPE = "CreateAnomalyDetectorTool"; + + // the default description of this tool + private static final String DEFAULT_DESCRIPTION = + "This is a tool used to help creating anomaly detector. It takes a required argument which is the name of the index, extract the index mappings and let the LLM to give the suggested aggregation field, aggregation method, category field and the date field which are required to create an anomaly detector."; + // the regex used to extract the key information from the response of LLM + private static final String EXTRACT_INFORMATION_REGEX = + "(?s).*\\{category_field=([^|]*)\\|aggregation_field=([^|]*)\\|aggregation_method=([^}]*)}.*"; + // valid field types which support aggregation + private static final Set VALID_FIELD_TYPES = Set + .of( + "keyword", + "constant_keyword", + "wildcard", + "long", + "integer", + "short", + "byte", + "double", + "float", + "half_float", + "scaled_float", + "unsigned_long", + "ip" + ); + // the index name key in the output + private static final String OUTPUT_KEY_INDEX = "index"; + // the category field key in the output + private static final String OUTPUT_KEY_CATEGORY_FIELD = "categoryField"; + // the aggregation field key in the output + private static final String OUTPUT_KEY_AGGREGATION_FIELD = "aggregationField"; + // the aggregation method name key in the output + private static final String OUTPUT_KEY_AGGREGATION_METHOD = "aggregationMethod"; + // the date fields key in the output + private static final String OUTPUT_KEY_DATE_FIELDS = "dateFields"; + // the default prompt dictionary, includes claude and openai + private static final Map DEFAULT_PROMPT_DICT = loadDefaultPromptFromFile(); + // the name of this tool + @Setter + @Getter + private String name = TYPE; + // the description of this tool + @Getter + @Setter + private String description = DEFAULT_DESCRIPTION; + + // the version of this tool + @Getter + private String version; + + 
// the OpenSearch transport client + private Client client; + // the mode id of LLM + @Getter + private String modelId; + // LLM model type, CLAUDE or OPENAI + @Getter + private ModelType modelType; + // the default prompt for creating anomaly detector + private String contextPrompt; + + enum ModelType { + CLAUDE, + OPENAI; + + public static ModelType from(String value) { + return valueOf(value.toUpperCase(Locale.ROOT)); + } + + } + + /** + * + * @param client the OpenSearch transport client + * @param modelId the model ID of LLM + */ + public CreateAnomalyDetectorTool(Client client, String modelId, String modelType) { + this.client = client; + this.modelId = modelId; + if (!ModelType.OPENAI.toString().equalsIgnoreCase(modelType) && !ModelType.CLAUDE.toString().equalsIgnoreCase(modelType)) { + throw new IllegalArgumentException("Unsupported model_type: " + modelType); + } + this.modelType = ModelType.from(modelType); + this.contextPrompt = DEFAULT_PROMPT_DICT.getOrDefault(this.modelType.toString(), ""); + } + + /** + * The main running method of this tool + * @param parameters the input parameters + * @param listener the action listener + * + */ + @Override + public void run(Map parameters, ActionListener listener) { + Map enrichedParameters = enrichParameters(parameters); + String indexName = enrichedParameters.get("index"); + if (Strings.isNullOrEmpty(indexName)) { + throw new IllegalArgumentException( + "Return this final answer to human directly and do not use other tools: 'Please provide index name'. Please try to directly send this message to human to ask for index name" + ); + } + if (indexName.startsWith(".")) { + throw new IllegalArgumentException( + "CreateAnomalyDetectionTool doesn't support searching indices starting with '.' 
since it could be system index, current searching index name: " + + indexName + ); + } + + GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(indexName); + client.admin().indices().getMappings(getMappingsRequest, ActionListener.wrap(response -> { + Map mappings = response.getMappings(); + if (mappings.size() == 0) { + throw new IllegalArgumentException("No mapping found for the index: " + indexName); + } + + MappingMetadata mappingMetadata; + // when the index name is wildcard pattern, we fetch the mappings of the first index + if (indexName.contains("*")) { + mappingMetadata = mappings.get((String) mappings.keySet().toArray()[0]); + } else { + mappingMetadata = mappings.get(indexName); + } + + Map mappingSource = (Map) mappingMetadata.getSourceAsMap().get("properties"); + if (Objects.isNull(mappingSource)) { + throw new IllegalArgumentException( + "The index " + indexName + " doesn't have mapping metadata, please add data to it or using another index." + ); + } + + // flatten all the fields in the mapping + Map fieldsToType = new HashMap<>(); + ToolHelper.extractFieldNamesTypes(mappingSource, fieldsToType, ""); + + // find all date type fields from the mapping + final Set dateFields = findDateTypeFields(fieldsToType); + if (dateFields.isEmpty()) { + throw new IllegalArgumentException( + "The index " + indexName + " doesn't have date type fields, cannot create an anomaly detector for it." 
+ ); + } + StringJoiner dateFieldsJoiner = new StringJoiner(","); + dateFields.forEach(dateFieldsJoiner::add); + + // filter the mapping to improve the accuracy of the result + // only fields support aggregation can be existed in the mapping and sent to LLM + Map filteredMapping = fieldsToType + .entrySet() + .stream() + .filter(entry -> VALID_FIELD_TYPES.contains(entry.getValue())) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + + // construct the prompt + String prompt = constructPrompt(filteredMapping, indexName); + RemoteInferenceInputDataSet inputDataSet = RemoteInferenceInputDataSet + .builder() + .parameters(Collections.singletonMap("prompt", prompt)) + .build(); + ActionRequest request = new MLPredictionTaskRequest( + modelId, + MLInput.builder().algorithm(FunctionName.REMOTE).inputDataset(inputDataSet).build(), + null + ); + + client.execute(MLPredictionTaskAction.INSTANCE, request, ActionListener.wrap(mlTaskResponse -> { + ModelTensorOutput modelTensorOutput = (ModelTensorOutput) mlTaskResponse.getOutput(); + ModelTensors modelTensors = modelTensorOutput.getMlModelOutputs().get(0); + ModelTensor modelTensor = modelTensors.getMlModelTensors().get(0); + Map dataAsMap = (Map) modelTensor.getDataAsMap(); + if (dataAsMap == null) { + listener.onFailure(new IllegalStateException("Remote endpoint fails to inference.")); + return; + } + String finalResponse = dataAsMap.get("response"); + if (Strings.isNullOrEmpty(finalResponse)) { + listener.onFailure(new IllegalStateException("Remote endpoint fails to inference, no response found.")); + return; + } + + // use regex pattern to extract the suggested parameters for the create anomaly detector API + Pattern pattern = Pattern.compile(EXTRACT_INFORMATION_REGEX); + Matcher matcher = pattern.matcher(finalResponse); + if (!matcher.matches()) { + log + .error( + "The inference result from remote endpoint is not valid because the result: [" + + finalResponse + + "] cannot match the 
regex: " + + EXTRACT_INFORMATION_REGEX + ); + listener + .onFailure( + new IllegalStateException( + "The inference result from remote endpoint is not valid, cannot extract the key information from the result." + ) + ); + return; + } + + // remove double quotes or whitespace if exists + String categoryField = matcher.group(1).replaceAll("\"", "").strip(); + String aggregationField = matcher.group(2).replaceAll("\"", "").strip(); + String aggregationMethod = matcher.group(3).replaceAll("\"", "").strip(); + + Map result = ImmutableMap + .of( + OUTPUT_KEY_INDEX, + indexName, + OUTPUT_KEY_CATEGORY_FIELD, + categoryField, + OUTPUT_KEY_AGGREGATION_FIELD, + aggregationField, + OUTPUT_KEY_AGGREGATION_METHOD, + aggregationMethod, + OUTPUT_KEY_DATE_FIELDS, + dateFieldsJoiner.toString() + ); + listener.onResponse((T) AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(result))); + }, e -> { + log.error("fail to predict model: " + e); + listener.onFailure(e); + })); + }, e -> { + log.error("failed to get mapping: " + e); + if (e.toString().contains("IndexNotFoundException")) { + listener + .onFailure( + new IllegalArgumentException( + "Return this final answer to human directly and do not use other tools: 'The index doesn't exist, please provide another index and retry'. 
Please try to directly send this message to human to ask for index name" + ) + ); + } else { + listener.onFailure(e); + } + })); + } + + /** + * Enrich the parameters by adding the parameters extracted from the chat + * @param parameters the original parameters + * @return the enriched parameters with parameters extracting from the chat + */ + private Map enrichParameters(Map parameters) { + Map result = new HashMap<>(parameters); + try { + // input is a map + Map chatParameters = gson.fromJson(parameters.get("input"), Map.class); + result.putAll(chatParameters); + } catch (Exception e) { + // input is a string + String indexName = parameters.getOrDefault("input", ""); + if (!indexName.isEmpty()) { + result.put("index", indexName); + } + } + return result; + } + + /** + * + * @param fieldsToType the flattened field-> field type mapping + * @return a list containing all the date type fields + */ + private Set findDateTypeFields(final Map fieldsToType) { + Set result = new HashSet<>(); + for (Map.Entry entry : fieldsToType.entrySet()) { + String value = entry.getValue(); + if (value.equals("date") || value.equals("date_nanos")) { + result.add(entry.getKey()); + } + } + return result; + } + + @SuppressWarnings("unchecked") + private static Map loadDefaultPromptFromFile() { + try (InputStream inputStream = CreateAnomalyDetectorTool.class.getResourceAsStream("CreateAnomalyDetectorDefaultPrompt.json")) { + if (inputStream != null) { + return gson.fromJson(new String(inputStream.readAllBytes(), StandardCharsets.UTF_8), Map.class); + } + } catch (IOException e) { + log.error("Failed to load prompt from the file CreateAnomalyDetectorDefaultPrompt.json, error: ", e); + } + return new HashMap<>(); + } + + /** + * + * @param fieldsToType the flattened field-> field type mapping + * @param indexName the index name + * @return the prompt about creating anomaly detector + */ + private String constructPrompt(final Map fieldsToType, final String indexName) { + StringJoiner 
tableInfoJoiner = new StringJoiner("\n"); + for (Map.Entry entry : fieldsToType.entrySet()) { + tableInfoJoiner.add("- " + entry.getKey() + ": " + entry.getValue()); + } + + Map indexInfo = ImmutableMap.of("indexName", indexName, "indexMapping", tableInfoJoiner.toString()); + StringSubstitutor substitutor = new StringSubstitutor(indexInfo, "${indexInfo.", "}"); + return substitutor.replace(contextPrompt); + } + + /** + * + * @param parameters the input parameters + * @return false if the input parameters is null or empty + */ + @Override + public boolean validate(Map parameters) { + return parameters != null && parameters.size() != 0; + } + + /** + * + * @return the type of this tool + */ + @Override + public String getType() { + return TYPE; + } + + /** + * The tool factory + */ + public static class Factory implements Tool.Factory { + private Client client; + + private static CreateAnomalyDetectorTool.Factory INSTANCE; + + /** + * Create or return the singleton factory instance + */ + public static CreateAnomalyDetectorTool.Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (CreateAnomalyDetectorTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new CreateAnomalyDetectorTool.Factory(); + return INSTANCE; + } + } + + public void init(Client client) { + this.client = client; + } + + /** + * + * @param map the input parameters + * @return the instance of this tool + */ + @Override + public CreateAnomalyDetectorTool create(Map map) { + String modelId = (String) map.getOrDefault("model_id", ""); + if (modelId.isEmpty()) { + throw new IllegalArgumentException("model_id cannot be empty."); + } + String modelType = (String) map.getOrDefault("model_type", ModelType.CLAUDE.toString()); + if (!ModelType.OPENAI.toString().equalsIgnoreCase(modelType) && !ModelType.CLAUDE.toString().equalsIgnoreCase(modelType)) { + throw new IllegalArgumentException("Unsupported model_type: " + modelType); + } + return new 
CreateAnomalyDetectorTool(client, modelId, modelType); + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } + } +} diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 3f8c728b..a7cca56e 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -33,6 +33,7 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.agent.common.SkillSettings; import org.opensearch.agent.tools.utils.ClusterSettingHelper; +import org.opensearch.agent.tools.utils.ToolHelper; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.core.action.ActionListener; @@ -396,7 +397,7 @@ private String constructTableInfo(SearchHit[] searchHits, Map fieldsToType = new HashMap<>(); - extractNamesTypes(mappingSource, fieldsToType, ""); + ToolHelper.extractFieldNamesTypes(mappingSource, fieldsToType, ""); StringJoiner tableInfoJoiner = new StringJoiner("\n"); List sortedKeys = new ArrayList<>(fieldsToType.keySet()); Collections.sort(sortedKeys); @@ -434,28 +435,6 @@ private String constructPrompt(String tableInfo, String question, String indexNa return substitutor.replace(contextPrompt); } - private void extractNamesTypes(Map mappingSource, Map fieldsToType, String prefix) { - if (!prefix.isEmpty()) { - prefix += "."; - } - - for (Map.Entry entry : mappingSource.entrySet()) { - String n = entry.getKey(); - Object v = entry.getValue(); - - if (v instanceof Map) { - Map vMap = (Map) v; - if (vMap.containsKey("type")) { - if (!((vMap.getOrDefault("type", "")).equals("alias"))) { - fieldsToType.put(prefix + n, (String) vMap.get("type")); - } - } else if (vMap.containsKey("properties")) { - 
extractNamesTypes((Map) vMap.get("properties"), fieldsToType, prefix + n); - } - } - } - } - private static void extractSamples(Map sampleSource, Map fieldsToSample, String prefix) throws PrivilegedActionException { if (!prefix.isEmpty()) { diff --git a/src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java b/src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java new file mode 100644 index 00000000..34c80135 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java @@ -0,0 +1,42 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools.utils; + +import java.util.Map; + +public class ToolHelper { + /** + * Flatten all the fields in the mappings, insert the field to fieldType mapping to a map + * @param mappingSource the mappings of an index + * @param fieldsToType the result containing the field to fieldType mapping + * @param prefix the parent field path + */ + public static void extractFieldNamesTypes(Map mappingSource, Map fieldsToType, String prefix) { + if (prefix.length() > 0) { + prefix += "."; + } + + for (Map.Entry entry : mappingSource.entrySet()) { + String n = entry.getKey(); + Object v = entry.getValue(); + + if (v instanceof Map) { + Map vMap = (Map) v; + if (vMap.containsKey("type")) { + if (!((vMap.getOrDefault("type", "")).equals("alias"))) { + fieldsToType.put(prefix + n, (String) vMap.get("type")); + } + } + if (vMap.containsKey("properties")) { + extractFieldNamesTypes((Map) vMap.get("properties"), fieldsToType, prefix + n); + } + if (vMap.containsKey("fields")) { + extractFieldNamesTypes((Map) vMap.get("fields"), fieldsToType, prefix + n); + } + } + } + } +} diff --git a/src/main/resources/org/opensearch/agent/tools/CreateAnomalyDetectorDefaultPrompt.json b/src/main/resources/org/opensearch/agent/tools/CreateAnomalyDetectorDefaultPrompt.json new file mode 100644 index 00000000..9b69bce7 --- /dev/null +++ 
b/src/main/resources/org/opensearch/agent/tools/CreateAnomalyDetectorDefaultPrompt.json @@ -0,0 +1,4 @@ +{ + "CLAUDE": "Human:\" turn\": Here is an example of the create anomaly detector API: POST _plugins/_anomaly_detection/detectors, {\"time_field\":\"timestamp\",\"indices\":[\"server_log*\"],\"feature_attributes\":[{\"feature_name\":\"test\",\"feature_enabled\":true,\"aggregation_query\":{\"test\":{\"sum\":{\"field\":\"value\"}}}}],\"category_field\":[\"ip\"]}, and here are the mapping info containing all the fields in the index ${indexInfo.indexName}: ${indexInfo.indexMapping}, and the optional aggregation methods are count, avg, min, max and sum. Please give me some suggestion about creating an anomaly detector for the index ${indexInfo.indexName}, you need to give the key information: the top 3 suitable aggregation fields which are numeric types and the suitable aggregation method for each field, if there are no numeric type fields, both the aggregation field and method are empty string, and also give the category field if there exists a keyword type field like ip, address, host, city, country or region, if not exist, the category field is empty. Show me a format of keyed and pipe-delimited list wrapped in a curly bracket just like {category_field=the category field if exists|aggregation_field=comma-delimited list of all the aggregation field names|aggregation_method=comma-delimited list of all the aggregation methods}. 
\n\nAssistant:\" turn\"", + "OPENAI": "Here is an example of the create anomaly detector API: POST _plugins/_anomaly_detection/detectors, {\"time_field\":\"timestamp\",\"indices\":[\"server_log*\"],\"feature_attributes\":[{\"feature_name\":\"test\",\"feature_enabled\":true,\"aggregation_query\":{\"test\":{\"sum\":{\"field\":\"value\"}}}}],\"category_field\":[\"ip\"]}, and here are the mapping info containing all the fields in the index ${indexInfo.indexName}: ${indexInfo.indexMapping}, and the optional aggregation methods are count, avg, min, max and sum. Please give me some suggestion about creating an anomaly detector for the index ${indexInfo.indexName}, you need to give the key information: the top 3 suitable aggregation fields which are numeric types and the suitable aggregation method for each field, if there are no numeric type fields, both the aggregation field and method are empty string, and also give the category field if there exists a keyword type field like ip, address, host, city, country or region, if not exist, the category field is empty. Show me a format of keyed and pipe-delimited list wrapped in a curly bracket just like {category_field=the category field if exists|aggregation_field=comma-delimited list of all the aggregation field names|aggregation_method=comma-delimited list of all the aggregation methods}. 
" +} diff --git a/src/test/java/org/opensearch/agent/tools/CreateAnomalyDetectorToolTests.java b/src/test/java/org/opensearch/agent/tools/CreateAnomalyDetectorToolTests.java new file mode 100644 index 00000000..0749ab70 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/CreateAnomalyDetectorToolTests.java @@ -0,0 +1,280 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.when; +import static org.opensearch.ml.common.CommonValue.ML_CONNECTOR_INDEX; +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.opensearch.client.AdminClient; +import org.opensearch.client.Client; +import org.opensearch.client.IndicesAdminClient; +import org.opensearch.cluster.metadata.MappingMetadata; +import org.opensearch.core.action.ActionListener; +import org.opensearch.ml.common.output.model.MLResultDataType; +import org.opensearch.ml.common.output.model.ModelTensor; +import org.opensearch.ml.common.output.model.ModelTensorOutput; +import org.opensearch.ml.common.output.model.ModelTensors; +import org.opensearch.ml.common.transport.MLTaskResponse; +import org.opensearch.ml.common.transport.prediction.MLPredictionTaskAction; + +import com.google.common.collect.ImmutableMap; + +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public class CreateAnomalyDetectorToolTests { + @Mock + private Client client; + @Mock + private AdminClient adminClient; + 
@Mock + private IndicesAdminClient indicesAdminClient; + @Mock + private GetMappingsResponse getMappingsResponse; + @Mock + private MappingMetadata mappingMetadata; + private Map mockedMappings; + private Map indexMappings; + + @Mock + private MLTaskResponse mlTaskResponse; + @Mock + private ModelTensorOutput modelTensorOutput; + @Mock + private ModelTensors modelTensors; + + private ModelTensor modelTensor; + + private Map modelReturns; + + private String mockedIndexName = "http_logs"; + private String mockedResponse = "{category_field=|aggregation_field=response,responseLatency|aggregation_method=count,avg}"; + private String mockedResult = + "{\"index\":\"http_logs\",\"categoryField\":\"\",\"aggregationField\":\"response,responseLatency\",\"aggregationMethod\":\"count,avg\",\"dateFields\":\"date\"}"; + + private String mockedResultForIndexPattern = + "{\"index\":\"http_logs*\",\"categoryField\":\"\",\"aggregationField\":\"response,responseLatency\",\"aggregationMethod\":\"count,avg\",\"dateFields\":\"date\"}"; + + @Before + public void setup() { + MockitoAnnotations.openMocks(this); + createMappings(); + // get mapping + when(mappingMetadata.getSourceAsMap()).thenReturn(indexMappings); + when(getMappingsResponse.getMappings()).thenReturn(mockedMappings); + when(client.admin()).thenReturn(adminClient); + when(adminClient.indices()).thenReturn(indicesAdminClient); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(getMappingsResponse); + return null; + }).when(indicesAdminClient).getMappings(any(), any()); + + initMLTensors(); + CreateAnomalyDetectorTool.Factory.getInstance().init(client); + } + + @Test + public void testModelIdIsNullOrEmpty() { + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "")) + ); + assertEquals("model_id cannot be empty.", exception.getMessage()); + } + + 
@Test + public void testModelType() { + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "model_type", "unknown")) + ); + assertEquals("Unsupported model_type: unknown", exception.getMessage()); + + CreateAnomalyDetectorTool tool = CreateAnomalyDetectorTool.Factory + .getInstance() + .create(ImmutableMap.of("model_id", "modelId", "model_type", "openai")); + assertEquals(CreateAnomalyDetectorTool.TYPE, tool.getName()); + assertEquals("modelId", tool.getModelId()); + assertEquals("OPENAI", tool.getModelType().toString()); + + tool = CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId", "model_type", "claude")); + assertEquals(CreateAnomalyDetectorTool.TYPE, tool.getName()); + assertEquals("modelId", tool.getModelId()); + assertEquals("CLAUDE", tool.getModelType().toString()); + } + + @Test + public void testTool() { + CreateAnomalyDetectorTool tool = CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId")); + assertEquals(CreateAnomalyDetectorTool.TYPE, tool.getName()); + assertEquals("modelId", tool.getModelId()); + assertEquals("CLAUDE", tool.getModelType().toString()); + + tool + .run( + ImmutableMap.of("index", mockedIndexName), + ActionListener.wrap(response -> assertEquals(mockedResult, response), log::info) + ); + tool + .run( + ImmutableMap.of("index", mockedIndexName + "*"), + ActionListener.wrap(response -> assertEquals(mockedResultForIndexPattern, response), log::info) + ); + tool + .run( + ImmutableMap.of("input", mockedIndexName), + ActionListener.wrap(response -> assertEquals(mockedResult, response), log::info) + ); + tool + .run( + ImmutableMap.of("input", gson.toJson(ImmutableMap.of("index", mockedIndexName))), + ActionListener.wrap(response -> assertEquals(mockedResult, response), log::info) + ); + } + + @Test + public void testToolWithInvalidResponse() 
{ + CreateAnomalyDetectorTool tool = CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId")); + + modelReturns = Collections.singletonMap("response", ""); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, modelReturns); + initMLTensors(); + + Exception exception = assertThrows( + IllegalStateException.class, + () -> tool + .run(ImmutableMap.of("index", mockedIndexName), ActionListener.wrap(response -> assertEquals(response, ""), e -> { + throw new IllegalStateException(e.getMessage()); + })) + ); + assertEquals("Remote endpoint fails to inference, no response found.", exception.getMessage()); + + modelReturns = Collections.singletonMap("response", "not valid response"); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, modelReturns); + initMLTensors(); + + exception = assertThrows( + IllegalStateException.class, + () -> tool + .run( + ImmutableMap.of("index", mockedIndexName), + ActionListener.wrap(response -> assertEquals(response, "not valid response"), e -> { + throw new IllegalStateException(e.getMessage()); + }) + ) + ); + assertEquals( + "The inference result from remote endpoint is not valid, cannot extract the key information from the result.", + exception.getMessage() + ); + + modelReturns = Collections.singletonMap("response", null); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, modelReturns); + initMLTensors(); + + exception = assertThrows( + IllegalStateException.class, + () -> tool + .run(ImmutableMap.of("index", mockedIndexName), ActionListener.wrap(response -> assertEquals(response, ""), e -> { + throw new IllegalStateException(e.getMessage()); + })) + ); + assertEquals("Remote endpoint fails to inference, no response found.", exception.getMessage()); + } + + @Test + public void testToolWithSystemIndex() { + CreateAnomalyDetectorTool tool = 
CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId")); + Exception exception = assertThrows( + IllegalArgumentException.class, + () -> tool.run(ImmutableMap.of("index", ML_CONNECTOR_INDEX), ActionListener.wrap(result -> {}, e -> {})) + ); + assertEquals( + "CreateAnomalyDetectionTool doesn't support searching indices starting with '.' since it could be system index, current searching index name: " + + ML_CONNECTOR_INDEX, + exception.getMessage() + ); + } + + @Test + public void testToolWithGetMappingFailed() { + CreateAnomalyDetectorTool tool = CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId")); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new Exception("No mapping found for the index: " + mockedIndexName)); + return null; + }).when(indicesAdminClient).getMappings(any(), any()); + + tool.run(ImmutableMap.of("index", mockedIndexName), ActionListener.wrap(result -> {}, e -> { + assertEquals("No mapping found for the index: " + mockedIndexName, e.getMessage()); + })); + } + + @Test + public void testToolWithPredictModelFailed() { + CreateAnomalyDetectorTool tool = CreateAnomalyDetectorTool.Factory.getInstance().create(ImmutableMap.of("model_id", "modelId")); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(new Exception("predict model failed")); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + + tool.run(ImmutableMap.of("index", mockedIndexName), ActionListener.wrap(result -> {}, e -> { + assertEquals("predict model failed", e.getMessage()); + })); + } + + private void createMappings() { + indexMappings = new HashMap<>(); + indexMappings + .put( + "properties", + ImmutableMap + .of( + "response", + ImmutableMap.of("type", "integer"), + "responseLatency", + ImmutableMap.of("type", "float"), 
+ "date", + ImmutableMap.of("type", "date") + ) + ); + mockedMappings = new HashMap<>(); + mockedMappings.put(mockedIndexName, mappingMetadata); + + modelReturns = Collections.singletonMap("response", mockedResponse); + modelTensor = new ModelTensor("tensor", new Number[0], new long[0], MLResultDataType.STRING, null, null, modelReturns); + } + + private void initMLTensors() { + when(modelTensors.getMlModelTensors()).thenReturn(Collections.singletonList(modelTensor)); + when(modelTensorOutput.getMlModelOutputs()).thenReturn(Collections.singletonList(modelTensors)); + when(mlTaskResponse.getOutput()).thenReturn(modelTensorOutput); + + // call model + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(mlTaskResponse); + return null; + }).when(client).execute(eq(MLPredictionTaskAction.INSTANCE), any(), any()); + } +} diff --git a/src/test/java/org/opensearch/integTest/CreateAnomalyDetectorToolIT.java b/src/test/java/org/opensearch/integTest/CreateAnomalyDetectorToolIT.java new file mode 100644 index 00000000..648a381b --- /dev/null +++ b/src/test/java/org/opensearch/integTest/CreateAnomalyDetectorToolIT.java @@ -0,0 +1,345 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.integTest; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; + +import org.hamcrest.MatcherAssert; +import org.opensearch.agent.tools.CreateAnomalyDetectorTool; +import org.opensearch.client.ResponseException; + +import lombok.SneakyThrows; + +public class CreateAnomalyDetectorToolIT extends ToolIntegrationTest { + private final String NORMAL_INDEX = "http_logs"; + private final String NORMAL_INDEX_WITH_NO_AVAILABLE_FIELDS = "products"; + private final String NORMAL_INDEX_WITH_NO_DATE_FIELDS = "normal_index_with_no_date_fields"; + 
private final String NORMAL_INDEX_WITH_NO_MAPPING = "normal_index_with_no_mapping"; + private final String ABNORMAL_INDEX = "abnormal_index"; + + @Override + List promptHandlers() { + PromptHandler createAnomalyDetectorToolHandler = new PromptHandler() { + @Override + String response(String prompt) { + int flag; + if (prompt.contains(NORMAL_INDEX)) { + flag = randomIntBetween(0, 9); + switch (flag) { + case 0: + return "{category_field=|aggregation_field=response,responseLatency|aggregation_method=count,avg}"; + case 1: + return "{category_field=ip|aggregation_field=response,responseLatency|aggregation_method=count,avg}"; + case 2: + return "{category_field=|aggregation_field=responseLatency|aggregation_method=avg}"; + case 3: + return "{category_field=country.keyword|aggregation_field=response,responseLatency|aggregation_method=count,avg}"; + case 4: + return "{category_field=country.keyword|aggregation_field=response.keyword|aggregation_method=count}"; + case 5: + return "{category_field=\"country.keyword\"|aggregation_field=\"response,responseLatency\"|aggregation_method=\"count,avg\"}"; + case 6: + return "{category_field=ip|aggregation_field=responseLatency|aggregation_method=avg}"; + case 7: + return "{category_field=\"ip\"|aggregation_field=\"responseLatency\"|aggregation_method=\"avg\"}"; + case 8: + return "{category_field= ip |aggregation_field= responseLatency |aggregation_method= avg }"; + case 9: + return "{category_field=\" ip \"|aggregation_field=\" responseLatency \"|aggregation_method=\" avg \"}"; + default: + return "{category_field=|aggregation_field=response|aggregation_method=count}"; + } + } else if (prompt.contains(NORMAL_INDEX_WITH_NO_AVAILABLE_FIELDS)) { + flag = randomIntBetween(0, 9); + switch (flag) { + case 0: + return "{category_field=|aggregation_field=|aggregation_method=}"; + case 1: + return "{category_field= |aggregation_field= |aggregation_method= }"; + case 2: + return 
"{category_field=\"\"|aggregation_field=\"\"|aggregation_method=\"\"}"; + case 3: + return "{category_field=product|aggregation_field=|aggregation_method=sum}"; + case 4: + return "{category_field=product|aggregation_field=sales|aggregation_method=}"; + case 5: + return "{category_field=product|aggregation_field=\"\"|aggregation_method=sum}"; + case 6: + return "{category_field=product|aggregation_field=sales|aggregation_method=\"\"}"; + case 7: + return "{category_field=product|aggregation_field= |aggregation_method=sum}"; + case 8: + return "{category_field=product|aggregation_field=sales |aggregation_method= }"; + case 9: + return "{category_field=\"\"|aggregation_field= |aggregation_method=\"\" }"; + default: + return "{category_field=product|aggregation_field= |aggregation_method= }"; + } + } else { + flag = randomIntBetween(0, 1); + switch (flag) { + case 0: + return "wrong response"; + case 1: + return "{category_field=product}"; + default: + return "{category_field=}"; + } + } + } + + @Override + boolean apply(String prompt) { + return true; + } + }; + return List.of(createAnomalyDetectorToolHandler); + } + + @Override + String toolType() { + return CreateAnomalyDetectorTool.TYPE; + } + + public void testCreateAnomalyDetectorTool() { + prepareIndex(); + String agentId = registerAgent(); + String index; + if (randomIntBetween(0, 1) == 0) { + index = NORMAL_INDEX; + } else { + index = NORMAL_INDEX_WITH_NO_AVAILABLE_FIELDS; + } + String result = executeAgent(agentId, "{\"parameters\": {\"index\":\"" + index + "\"}}"); + assertTrue(result.contains("index")); + assertTrue(result.contains("categoryField")); + assertTrue(result.contains("aggregationField")); + assertTrue(result.contains("aggregationMethod")); + assertTrue(result.contains("dateFields")); + } + + public void testCreateAnomalyDetectorToolWithNonExistentModelId() { + prepareIndex(); + String agentId = registerAgentWithWrongModelId(); + Exception exception = assertThrows( + ResponseException.class, + 
() -> executeAgent(agentId, "{\"parameters\": {\"index\":\"" + ABNORMAL_INDEX + "\"}}") + ); + MatcherAssert.assertThat(exception.getMessage(), allOf(containsString("Failed to find model"))); + } + + public void testCreateAnomalyDetectorToolWithUnexpectedResult() { + prepareIndex(); + String agentId = registerAgent(); + + Exception exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"index\":\"" + NORMAL_INDEX_WITH_NO_MAPPING + "\"}}") + ); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "The index " + + NORMAL_INDEX_WITH_NO_MAPPING + + " doesn't have mapping metadata, please add data to it or using another index." + ) + ) + ); + + exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"index\":\"" + NORMAL_INDEX_WITH_NO_DATE_FIELDS + "\"}}") + ); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "The index " + + NORMAL_INDEX_WITH_NO_DATE_FIELDS + + " doesn't have date type fields, cannot create an anomaly detector for it." + ) + ) + ); + + exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"index\":\"" + ABNORMAL_INDEX + "\"}}") + ); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "The inference result from remote endpoint is not valid, cannot extract the key information from the result." + ) + ) + ); + } + + public void testCreateAnomalyDetectorToolWithSystemIndex() { + prepareIndex(); + String agentId = registerAgent(); + Exception exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"index\": \".test\"}}") + ); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "CreateAnomalyDetectionTool doesn't support searching indices starting with '.' 
since it could be system index, current searching index name: .test" + ) + ) + ); + } + + public void testCreateAnomalyDetectorToolWithMissingIndex() { + prepareIndex(); + String agentId = registerAgent(); + Exception exception = assertThrows( + ResponseException.class, + () -> executeAgent(agentId, "{\"parameters\": {\"index\": \"non-existent\"}}") + ); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "Return this final answer to human directly and do not use other tools: 'The index doesn't exist, please provide another index and retry'. Please try to directly send this message to human to ask for index name" + ) + ) + ); + } + + public void testCreateAnomalyDetectorToolWithEmptyInput() { + prepareIndex(); + String agentId = registerAgent(); + Exception exception = assertThrows(ResponseException.class, () -> executeAgent(agentId, "{\"parameters\": {}}")); + MatcherAssert + .assertThat( + exception.getMessage(), + allOf( + containsString( + "Return this final answer to human directly and do not use other tools: 'Please provide index name'. 
Please try to directly send this message to human to ask for index name" + ) + ) + ); + } + + @SneakyThrows + private void prepareIndex() { + createIndexWithConfiguration( + NORMAL_INDEX, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"response\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"responseLatency\": {\n" + + " \"type\": \"float\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(NORMAL_INDEX, "0", List.of("response", "responseLatency", "date"), List.of(200, 0.15, "2024-07-03T10:22:56,520")); + addDocToIndex(NORMAL_INDEX, "1", List.of("response", "responseLatency", "date"), List.of(200, 3.15, "2024-07-03T10:22:57,520")); + + createIndexWithConfiguration( + NORMAL_INDEX_WITH_NO_AVAILABLE_FIELDS, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"product\": {\n" + + " " + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(NORMAL_INDEX_WITH_NO_AVAILABLE_FIELDS, "0", List.of("product", "date"), List.of(1, "2024-07-03T10:22:56,520")); + addDocToIndex(NORMAL_INDEX_WITH_NO_AVAILABLE_FIELDS, "1", List.of("product", "date"), List.of(2, "2024-07-03T10:22:57,520")); + + createIndexWithConfiguration( + NORMAL_INDEX_WITH_NO_DATE_FIELDS, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"product\": {\n" + + " " + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(NORMAL_INDEX_WITH_NO_DATE_FIELDS, "0", List.of("product"), List.of(1)); + addDocToIndex(NORMAL_INDEX_WITH_NO_DATE_FIELDS, "1", List.of("product"), List.of(2)); + + createIndexWithConfiguration(NORMAL_INDEX_WITH_NO_MAPPING, "{}"); + + createIndexWithConfiguration( + ABNORMAL_INDEX, + "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"date\": {\n" + + " " + + " \"type\": \"date\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}" + ); + addDocToIndex(ABNORMAL_INDEX, "0", List.of("date"), List.of(1, "2024-07-03T10:22:56,520")); + 
addDocToIndex(ABNORMAL_INDEX, "1", List.of("date"), List.of(2, "2024-07-03T10:22:57,520")); + } + + @SneakyThrows + private String registerAgentWithWrongModelId() { + String registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_create_anomaly_detector_tool_request_body.json") + .toURI() + ) + ); + registerAgentRequestBody = registerAgentRequestBody.replace("", "non-existent"); + return createAgent(registerAgentRequestBody); + } + + @SneakyThrows + private String registerAgent() { + String registerAgentRequestBody = Files + .readString( + Path + .of( + this + .getClass() + .getClassLoader() + .getResource("org/opensearch/agent/tools/register_flow_agent_of_create_anomaly_detector_tool_request_body.json") + .toURI() + ) + ); + registerAgentRequestBody = registerAgentRequestBody.replace("", modelId); + return createAgent(registerAgentRequestBody); + } +} diff --git a/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_create_anomaly_detector_tool_request_body.json b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_create_anomaly_detector_tool_request_body.json new file mode 100644 index 00000000..3ad9477e --- /dev/null +++ b/src/test/resources/org/opensearch/agent/tools/register_flow_agent_of_create_anomaly_detector_tool_request_body.json @@ -0,0 +1,12 @@ +{ + "name": "Test_create_anomaly_detector_flow_agent", + "type": "flow", + "tools": [ + { + "type": "CreateAnomalyDetectorTool", + "parameters": { + "model_id": "" + } + } + ] +} From dc7bd7a12fc4e08a55d316f0da19f77392b91eef Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 19 Jul 2024 09:11:11 +0800 Subject: [PATCH 103/119] Increment version to 2.16.0-SNAPSHOT (#324) Signed-off-by: opensearch-ci-bot Co-authored-by: opensearch-ci-bot --- build.gradle | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 415751d3..46c87ff8 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.15.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.16.0-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") version_tokens = opensearch_version.tokenize('-') From 4c32da722a1544fd324645230a0700bdeab78734 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 25 Jul 2024 15:18:47 +0800 Subject: [PATCH 104/119] fix(deps): update dependency lombok to v1.18.34 (#266) (#365) (cherry picked from commit 19e8ec8d55f5caccd1a935038d7663f7a520b534) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 46c87ff8..4708277f 100644 --- a/build.gradle +++ b/build.gradle @@ -48,7 +48,7 @@ plugins { } lombok { - version = "1.18.30" + version = "1.18.34" } repositories { From c0d3affcfa544ab92b7d6e78e790e42123c4551d Mon Sep 17 00:00:00 2001 From: zane-neo Date: Thu, 25 Jul 2024 16:00:54 +0800 Subject: [PATCH 105/119] chore(deps): update dependency gradle to v8.9 (#275) (#366) Signed-off-by: mend-for-github-com[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> (cherry picked from commit 03770d59dcd271f12379a506b52b6dc723226bd1) Co-authored-by: mend-for-github-com[bot] 
<50673670+mend-for-github-com[bot]@users.noreply.github.com> --- gradle/wrapper/gradle-wrapper.jar | Bin 63721 -> 43504 bytes gradle/wrapper/gradle-wrapper.properties | 5 +++-- gradlew | 7 +++++-- gradlew.bat | 2 ++ 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7f93135c49b765f8051ef9d0a6055ff8e46073d8..2c3521197d7c4586c843d1d3e9090525f1898cde 100644 GIT binary patch literal 43504 zcma&N1CXTcmMvW9vTb(Rwr$&4wr$(C?dmSu>@vG-+vuvg^_??!{yS%8zW-#zn-LkA z5&1^$^{lnmUON?}LBF8_K|(?T0Ra(xUH{($5eN!MR#ZihR#HxkUPe+_R8Cn`RRs(P z_^*#_XlXmGv7!4;*Y%p4nw?{bNp@UZHv1?Um8r6)Fei3p@ClJn0ECfg1hkeuUU@Or zDaPa;U3fE=3L}DooL;8f;P0ipPt0Z~9P0)lbStMS)ag54=uL9ia-Lm3nh|@(Y?B`; zx_#arJIpXH!U{fbCbI^17}6Ri*H<>OLR%c|^mh8+)*h~K8Z!9)DPf zR2h?lbDZQ`p9P;&DQ4F0sur@TMa!Y}S8irn(%d-gi0*WxxCSk*A?3lGh=gcYN?FGl z7D=Js!i~0=u3rox^eO3i@$0=n{K1lPNU zwmfjRVmLOCRfe=seV&P*1Iq=^i`502keY8Uy-WNPwVNNtJFx?IwAyRPZo2Wo1+S(xF37LJZ~%i)kpFQ3Fw=mXfd@>%+)RpYQLnr}B~~zoof(JVm^^&f zxKV^+3D3$A1G;qh4gPVjhrC8e(VYUHv#dy^)(RoUFM?o%W-EHxufuWf(l*@-l+7vt z=l`qmR56K~F|v<^Pd*p~1_y^P0P^aPC##d8+HqX4IR1gu+7w#~TBFphJxF)T$2WEa zxa?H&6=Qe7d(#tha?_1uQys2KtHQ{)Qco)qwGjrdNL7thd^G5i8Os)CHqc>iOidS} z%nFEDdm=GXBw=yXe1W-ShHHFb?Cc70+$W~z_+}nAoHFYI1MV1wZegw*0y^tC*s%3h zhD3tN8b=Gv&rj}!SUM6|ajSPp*58KR7MPpI{oAJCtY~JECm)*m_x>AZEu>DFgUcby z1Qaw8lU4jZpQ_$;*7RME+gq1KySGG#Wql>aL~k9tLrSO()LWn*q&YxHEuzmwd1?aAtI zBJ>P=&$=l1efe1CDU;`Fd+_;&wI07?V0aAIgc(!{a z0Jg6Y=inXc3^n!U0Atk`iCFIQooHqcWhO(qrieUOW8X(x?(RD}iYDLMjSwffH2~tB z)oDgNBLB^AJBM1M^c5HdRx6fBfka`(LD-qrlh5jqH~);#nw|iyp)()xVYak3;Ybik z0j`(+69aK*B>)e_p%=wu8XC&9e{AO4c~O1U`5X9}?0mrd*m$_EUek{R?DNSh(=br# z#Q61gBzEpmy`$pA*6!87 zSDD+=@fTY7<4A?GLqpA?Pb2z$pbCc4B4zL{BeZ?F-8`s$?>*lXXtn*NC61>|*w7J* z$?!iB{6R-0=KFmyp1nnEmLsA-H0a6l+1uaH^g%c(p{iT&YFrbQ$&PRb8Up#X3@Zsk zD^^&LK~111%cqlP%!_gFNa^dTYT?rhkGl}5=fL{a`UViaXWI$k-UcHJwmaH1s=S$4 z%4)PdWJX;hh5UoK?6aWoyLxX&NhNRqKam7tcOkLh{%j3K^4Mgx1@i|Pi&}<^5>hs5 
zm8?uOS>%)NzT(%PjVPGa?X%`N2TQCKbeH2l;cTnHiHppPSJ<7y-yEIiC!P*ikl&!B z%+?>VttCOQM@ShFguHVjxX^?mHX^hSaO_;pnyh^v9EumqSZTi+#f&_Vaija0Q-e*| z7ulQj6Fs*bbmsWp{`auM04gGwsYYdNNZcg|ph0OgD>7O}Asn7^Z=eI>`$2*v78;sj-}oMoEj&@)9+ycEOo92xSyY344^ z11Hb8^kdOvbf^GNAK++bYioknrpdN>+u8R?JxG=!2Kd9r=YWCOJYXYuM0cOq^FhEd zBg2puKy__7VT3-r*dG4c62Wgxi52EMCQ`bKgf*#*ou(D4-ZN$+mg&7$u!! z-^+Z%;-3IDwqZ|K=ah85OLwkO zKxNBh+4QHh)u9D?MFtpbl)us}9+V!D%w9jfAMYEb>%$A;u)rrI zuBudh;5PN}_6J_}l55P3l_)&RMlH{m!)ai-i$g)&*M`eN$XQMw{v^r@-125^RRCF0 z^2>|DxhQw(mtNEI2Kj(;KblC7x=JlK$@78`O~>V!`|1Lm-^JR$-5pUANAnb(5}B}JGjBsliK4& zk6y(;$e&h)lh2)L=bvZKbvh@>vLlreBdH8No2>$#%_Wp1U0N7Ank!6$dFSi#xzh|( zRi{Uw%-4W!{IXZ)fWx@XX6;&(m_F%c6~X8hx=BN1&q}*( zoaNjWabE{oUPb!Bt$eyd#$5j9rItB-h*5JiNi(v^e|XKAj*8(k<5-2$&ZBR5fF|JA z9&m4fbzNQnAU}r8ab>fFV%J0z5awe#UZ|bz?Ur)U9bCIKWEzi2%A+5CLqh?}K4JHi z4vtM;+uPsVz{Lfr;78W78gC;z*yTch~4YkLr&m-7%-xc ztw6Mh2d>_iO*$Rd8(-Cr1_V8EO1f*^@wRoSozS) zy1UoC@pruAaC8Z_7~_w4Q6n*&B0AjOmMWa;sIav&gu z|J5&|{=a@vR!~k-OjKEgPFCzcJ>#A1uL&7xTDn;{XBdeM}V=l3B8fE1--DHjSaxoSjNKEM9|U9#m2<3>n{Iuo`r3UZp;>GkT2YBNAh|b z^jTq-hJp(ebZh#Lk8hVBP%qXwv-@vbvoREX$TqRGTgEi$%_F9tZES@z8Bx}$#5eeG zk^UsLBH{bc2VBW)*EdS({yw=?qmevwi?BL6*=12k9zM5gJv1>y#ML4!)iiPzVaH9% zgSImetD@dam~e>{LvVh!phhzpW+iFvWpGT#CVE5TQ40n%F|p(sP5mXxna+Ev7PDwA zamaV4m*^~*xV+&p;W749xhb_X=$|LD;FHuB&JL5?*Y2-oIT(wYY2;73<^#46S~Gx| z^cez%V7x$81}UWqS13Gz80379Rj;6~WdiXWOSsdmzY39L;Hg3MH43o*y8ibNBBH`(av4|u;YPq%{R;IuYow<+GEsf@R?=@tT@!}?#>zIIn0CoyV!hq3mw zHj>OOjfJM3F{RG#6ujzo?y32m^tgSXf@v=J$ELdJ+=5j|=F-~hP$G&}tDZsZE?5rX ztGj`!S>)CFmdkccxM9eGIcGnS2AfK#gXwj%esuIBNJQP1WV~b~+D7PJTmWGTSDrR` zEAu4B8l>NPuhsk5a`rReSya2nfV1EK01+G!x8aBdTs3Io$u5!6n6KX%uv@DxAp3F@{4UYg4SWJtQ-W~0MDb|j-$lwVn znAm*Pl!?Ps&3wO=R115RWKb*JKoexo*)uhhHBncEDMSVa_PyA>k{Zm2(wMQ(5NM3# z)jkza|GoWEQo4^s*wE(gHz?Xsg4`}HUAcs42cM1-qq_=+=!Gk^y710j=66(cSWqUe zklbm8+zB_syQv5A2rj!Vbw8;|$@C!vfNmNV!yJIWDQ>{+2x zKjuFX`~~HKG~^6h5FntRpnnHt=D&rq0>IJ9#F0eM)Y-)GpRjiN7gkA8wvnG#K=q{q z9dBn8_~wm4J<3J_vl|9H{7q6u2A!cW{bp#r*-f{gOV^e=8S{nc1DxMHFwuM$;aVI^ 
zz6A*}m8N-&x8;aunp1w7_vtB*pa+OYBw=TMc6QK=mbA-|Cf* zvyh8D4LRJImooUaSb7t*fVfih<97Gf@VE0|z>NcBwBQze);Rh!k3K_sfunToZY;f2 z^HmC4KjHRVg+eKYj;PRN^|E0>Gj_zagfRbrki68I^#~6-HaHg3BUW%+clM1xQEdPYt_g<2K+z!$>*$9nQ>; zf9Bei{?zY^-e{q_*|W#2rJG`2fy@{%6u0i_VEWTq$*(ZN37|8lFFFt)nCG({r!q#9 z5VK_kkSJ3?zOH)OezMT{!YkCuSSn!K#-Rhl$uUM(bq*jY? zi1xbMVthJ`E>d>(f3)~fozjg^@eheMF6<)I`oeJYx4*+M&%c9VArn(OM-wp%M<-`x z7sLP1&3^%Nld9Dhm@$3f2}87!quhI@nwd@3~fZl_3LYW-B?Ia>ui`ELg z&Qfe!7m6ze=mZ`Ia9$z|ARSw|IdMpooY4YiPN8K z4B(ts3p%2i(Td=tgEHX z0UQ_>URBtG+-?0E;E7Ld^dyZ;jjw0}XZ(}-QzC6+NN=40oDb2^v!L1g9xRvE#@IBR zO!b-2N7wVfLV;mhEaXQ9XAU+>=XVA6f&T4Z-@AX!leJ8obP^P^wP0aICND?~w&NykJ#54x3_@r7IDMdRNy4Hh;h*!u(Ol(#0bJdwEo$5437-UBjQ+j=Ic>Q2z` zJNDf0yO6@mr6y1#n3)s(W|$iE_i8r@Gd@!DWDqZ7J&~gAm1#~maIGJ1sls^gxL9LLG_NhU!pTGty!TbhzQnu)I*S^54U6Yu%ZeCg`R>Q zhBv$n5j0v%O_j{QYWG!R9W?5_b&67KB$t}&e2LdMvd(PxN6Ir!H4>PNlerpBL>Zvyy!yw z-SOo8caEpDt(}|gKPBd$qND5#a5nju^O>V&;f890?yEOfkSG^HQVmEbM3Ugzu+UtH zC(INPDdraBN?P%kE;*Ae%Wto&sgw(crfZ#Qy(<4nk;S|hD3j{IQRI6Yq|f^basLY; z-HB&Je%Gg}Jt@={_C{L$!RM;$$|iD6vu#3w?v?*;&()uB|I-XqEKqZPS!reW9JkLewLb!70T7n`i!gNtb1%vN- zySZj{8-1>6E%H&=V}LM#xmt`J3XQoaD|@XygXjdZ1+P77-=;=eYpoEQ01B@L*a(uW zrZeZz?HJsw_4g0vhUgkg@VF8<-X$B8pOqCuWAl28uB|@r`19DTUQQsb^pfqB6QtiT z*`_UZ`fT}vtUY#%sq2{rchyfu*pCg;uec2$-$N_xgjZcoumE5vSI{+s@iLWoz^Mf; zuI8kDP{!XY6OP~q5}%1&L}CtfH^N<3o4L@J@zg1-mt{9L`s^z$Vgb|mr{@WiwAqKg zp#t-lhrU>F8o0s1q_9y`gQNf~Vb!F%70f}$>i7o4ho$`uciNf=xgJ>&!gSt0g;M>*x4-`U)ysFW&Vs^Vk6m%?iuWU+o&m(2Jm26Y(3%TL; zA7T)BP{WS!&xmxNw%J=$MPfn(9*^*TV;$JwRy8Zl*yUZi8jWYF>==j~&S|Xinsb%c z2?B+kpet*muEW7@AzjBA^wAJBY8i|#C{WtO_or&Nj2{=6JTTX05}|H>N2B|Wf!*3_ z7hW*j6p3TvpghEc6-wufFiY!%-GvOx*bZrhZu+7?iSrZL5q9}igiF^*R3%DE4aCHZ zqu>xS8LkW+Auv%z-<1Xs92u23R$nk@Pk}MU5!gT|c7vGlEA%G^2th&Q*zfg%-D^=f z&J_}jskj|Q;73NP4<4k*Y%pXPU2Thoqr+5uH1yEYM|VtBPW6lXaetokD0u z9qVek6Q&wk)tFbQ8(^HGf3Wp16gKmr>G;#G(HRBx?F`9AIRboK+;OfHaLJ(P>IP0w zyTbTkx_THEOs%Q&aPrxbZrJlio+hCC_HK<4%f3ZoSAyG7Dn`=X=&h@m*|UYO-4Hq0 z-Bq&+Ie!S##4A6OGoC~>ZW`Y5J)*ouaFl_e9GA*VSL!O_@xGiBw!AF}1{tB)z(w%c 
zS1Hmrb9OC8>0a_$BzeiN?rkPLc9%&;1CZW*4}CDDNr2gcl_3z+WC15&H1Zc2{o~i) z)LLW=WQ{?ricmC`G1GfJ0Yp4Dy~Ba;j6ZV4r{8xRs`13{dD!xXmr^Aga|C=iSmor% z8hi|pTXH)5Yf&v~exp3o+sY4B^^b*eYkkCYl*T{*=-0HniSA_1F53eCb{x~1k3*`W zr~};p1A`k{1DV9=UPnLDgz{aJH=-LQo<5%+Em!DNN252xwIf*wF_zS^!(XSm(9eoj z=*dXG&n0>)_)N5oc6v!>-bd(2ragD8O=M|wGW z!xJQS<)u70m&6OmrF0WSsr@I%T*c#Qo#Ha4d3COcX+9}hM5!7JIGF>7<~C(Ear^Sn zm^ZFkV6~Ula6+8S?oOROOA6$C&q&dp`>oR-2Ym3(HT@O7Sd5c~+kjrmM)YmgPH*tL zX+znN>`tv;5eOfX?h{AuX^LK~V#gPCu=)Tigtq9&?7Xh$qN|%A$?V*v=&-2F$zTUv z`C#WyIrChS5|Kgm_GeudCFf;)!WH7FI60j^0o#65o6`w*S7R@)88n$1nrgU(oU0M9 zx+EuMkC>(4j1;m6NoGqEkpJYJ?vc|B zOlwT3t&UgL!pX_P*6g36`ZXQ; z9~Cv}ANFnJGp(;ZhS(@FT;3e)0)Kp;h^x;$*xZn*k0U6-&FwI=uOGaODdrsp-!K$Ac32^c{+FhI-HkYd5v=`PGsg%6I`4d9Jy)uW0y%) zm&j^9WBAp*P8#kGJUhB!L?a%h$hJgQrx!6KCB_TRo%9{t0J7KW8!o1B!NC)VGLM5! zpZy5Jc{`r{1e(jd%jsG7k%I+m#CGS*BPA65ZVW~fLYw0dA-H_}O zrkGFL&P1PG9p2(%QiEWm6x;U-U&I#;Em$nx-_I^wtgw3xUPVVu zqSuKnx&dIT-XT+T10p;yjo1Y)z(x1fb8Dzfn8e yu?e%!_ptzGB|8GrCfu%p?(_ zQccdaaVK$5bz;*rnyK{_SQYM>;aES6Qs^lj9lEs6_J+%nIiuQC*fN;z8md>r_~Mfl zU%p5Dt_YT>gQqfr@`cR!$NWr~+`CZb%dn;WtzrAOI>P_JtsB76PYe*<%H(y>qx-`Kq!X_; z<{RpAqYhE=L1r*M)gNF3B8r(<%8mo*SR2hu zccLRZwGARt)Hlo1euqTyM>^!HK*!Q2P;4UYrysje@;(<|$&%vQekbn|0Ruu_Io(w4#%p6ld2Yp7tlA`Y$cciThP zKzNGIMPXX%&Ud0uQh!uQZz|FB`4KGD?3!ND?wQt6!n*f4EmCoJUh&b?;B{|lxs#F- z31~HQ`SF4x$&v00@(P+j1pAaj5!s`)b2RDBp*PB=2IB>oBF!*6vwr7Dp%zpAx*dPr zb@Zjq^XjN?O4QcZ*O+8>)|HlrR>oD*?WQl5ri3R#2?*W6iJ>>kH%KnnME&TT@ZzrHS$Q%LC?n|e>V+D+8D zYc4)QddFz7I8#}y#Wj6>4P%34dZH~OUDb?uP%-E zwjXM(?Sg~1!|wI(RVuxbu)-rH+O=igSho_pDCw(c6b=P zKk4ATlB?bj9+HHlh<_!&z0rx13K3ZrAR8W)!@Y}o`?a*JJsD+twZIv`W)@Y?Amu_u zz``@-e2X}27$i(2=9rvIu5uTUOVhzwu%mNazS|lZb&PT;XE2|B&W1>=B58#*!~D&) zfVmJGg8UdP*fx(>Cj^?yS^zH#o-$Q-*$SnK(ZVFkw+er=>N^7!)FtP3y~Xxnu^nzY zikgB>Nj0%;WOltWIob|}%lo?_C7<``a5hEkx&1ku$|)i>Rh6@3h*`slY=9U}(Ql_< zaNG*J8vb&@zpdhAvv`?{=zDedJ23TD&Zg__snRAH4eh~^oawdYi6A3w8<Ozh@Kw)#bdktM^GVb zrG08?0bG?|NG+w^&JvD*7LAbjED{_Zkc`3H!My>0u5Q}m!+6VokMLXxl`Mkd=g&Xx 
z-a>m*#G3SLlhbKB!)tnzfWOBV;u;ftU}S!NdD5+YtOjLg?X}dl>7m^gOpihrf1;PY zvll&>dIuUGs{Qnd- zwIR3oIrct8Va^Tm0t#(bJD7c$Z7DO9*7NnRZorrSm`b`cxz>OIC;jSE3DO8`hX955ui`s%||YQtt2 z5DNA&pG-V+4oI2s*x^>-$6J?p=I>C|9wZF8z;VjR??Icg?1w2v5Me+FgAeGGa8(3S z4vg*$>zC-WIVZtJ7}o9{D-7d>zCe|z#<9>CFve-OPAYsneTb^JH!Enaza#j}^mXy1 z+ULn^10+rWLF6j2>Ya@@Kq?26>AqK{A_| zQKb*~F1>sE*=d?A?W7N2j?L09_7n+HGi{VY;MoTGr_)G9)ot$p!-UY5zZ2Xtbm=t z@dpPSGwgH=QtIcEulQNI>S-#ifbnO5EWkI;$A|pxJd885oM+ zGZ0_0gDvG8q2xebj+fbCHYfAXuZStH2j~|d^sBAzo46(K8n59+T6rzBwK)^rfPT+B zyIFw)9YC-V^rhtK`!3jrhmW-sTmM+tPH+;nwjL#-SjQPUZ53L@A>y*rt(#M(qsiB2 zx6B)dI}6Wlsw%bJ8h|(lhkJVogQZA&n{?Vgs6gNSXzuZpEyu*xySy8ro07QZ7Vk1!3tJphN_5V7qOiyK8p z#@jcDD8nmtYi1^l8ml;AF<#IPK?!pqf9D4moYk>d99Im}Jtwj6c#+A;f)CQ*f-hZ< z=p_T86jog%!p)D&5g9taSwYi&eP z#JuEK%+NULWus;0w32-SYFku#i}d~+{Pkho&^{;RxzP&0!RCm3-9K6`>KZpnzS6?L z^H^V*s!8<>x8bomvD%rh>Zp3>Db%kyin;qtl+jAv8Oo~1g~mqGAC&Qi_wy|xEt2iz zWAJEfTV%cl2Cs<1L&DLRVVH05EDq`pH7Oh7sR`NNkL%wi}8n>IXcO40hp+J+sC!W?!krJf!GJNE8uj zg-y~Ns-<~D?yqbzVRB}G>0A^f0!^N7l=$m0OdZuqAOQqLc zX?AEGr1Ht+inZ-Qiwnl@Z0qukd__a!C*CKuGdy5#nD7VUBM^6OCpxCa2A(X;e0&V4 zM&WR8+wErQ7UIc6LY~Q9x%Sn*Tn>>P`^t&idaOEnOd(Ufw#>NoR^1QdhJ8s`h^|R_ zXX`c5*O~Xdvh%q;7L!_!ohf$NfEBmCde|#uVZvEo>OfEq%+Ns7&_f$OR9xsihRpBb z+cjk8LyDm@U{YN>+r46?nn{7Gh(;WhFw6GAxtcKD+YWV?uge>;+q#Xx4!GpRkVZYu zzsF}1)7$?%s9g9CH=Zs+B%M_)+~*j3L0&Q9u7!|+T`^O{xE6qvAP?XWv9_MrZKdo& z%IyU)$Q95AB4!#hT!_dA>4e@zjOBD*Y=XjtMm)V|+IXzjuM;(l+8aA5#Kaz_$rR6! 
zj>#&^DidYD$nUY(D$mH`9eb|dtV0b{S>H6FBfq>t5`;OxA4Nn{J(+XihF(stSche7$es&~N$epi&PDM_N`As;*9D^L==2Q7Z2zD+CiU(|+-kL*VG+&9!Yb3LgPy?A zm7Z&^qRG_JIxK7-FBzZI3Q<;{`DIxtc48k> zc|0dmX;Z=W$+)qE)~`yn6MdoJ4co;%!`ddy+FV538Y)j(vg}5*k(WK)KWZ3WaOG!8 z!syGn=s{H$odtpqFrT#JGM*utN7B((abXnpDM6w56nhw}OY}0TiTG1#f*VFZr+^-g zbP10`$LPq_;PvrA1XXlyx2uM^mrjTzX}w{yuLo-cOClE8MMk47T25G8M!9Z5ypOSV zAJUBGEg5L2fY)ZGJb^E34R2zJ?}Vf>{~gB!8=5Z) z9y$>5c)=;o0HeHHSuE4U)#vG&KF|I%-cF6f$~pdYJWk_dD}iOA>iA$O$+4%@>JU08 zS`ep)$XLPJ+n0_i@PkF#ri6T8?ZeAot$6JIYHm&P6EB=BiaNY|aA$W0I+nz*zkz_z zkEru!tj!QUffq%)8y0y`T&`fuus-1p>=^hnBiBqD^hXrPs`PY9tU3m0np~rISY09> z`P3s=-kt_cYcxWd{de@}TwSqg*xVhp;E9zCsnXo6z z?f&Sv^U7n4`xr=mXle94HzOdN!2kB~4=%)u&N!+2;z6UYKUDqi-s6AZ!haB;@&B`? z_TRX0%@suz^TRdCb?!vNJYPY8L_}&07uySH9%W^Tc&1pia6y1q#?*Drf}GjGbPjBS zbOPcUY#*$3sL2x4v_i*Y=N7E$mR}J%|GUI(>WEr+28+V z%v5{#e!UF*6~G&%;l*q*$V?&r$Pp^sE^i-0$+RH3ERUUdQ0>rAq2(2QAbG}$y{de( z>{qD~GGuOk559Y@%$?N^1ApVL_a704>8OD%8Y%8B;FCt%AoPu8*D1 zLB5X>b}Syz81pn;xnB}%0FnwazlWfUV)Z-~rZg6~b z6!9J$EcE&sEbzcy?CI~=boWA&eeIa%z(7SE^qgVLz??1Vbc1*aRvc%Mri)AJaAG!p z$X!_9Ds;Zz)f+;%s&dRcJt2==P{^j3bf0M=nJd&xwUGlUFn?H=2W(*2I2Gdu zv!gYCwM10aeus)`RIZSrCK=&oKaO_Ry~D1B5!y0R=%!i2*KfXGYX&gNv_u+n9wiR5 z*e$Zjju&ODRW3phN925%S(jL+bCHv6rZtc?!*`1TyYXT6%Ju=|X;6D@lq$8T zW{Y|e39ioPez(pBH%k)HzFITXHvnD6hw^lIoUMA;qAJ^CU?top1fo@s7xT13Fvn1H z6JWa-6+FJF#x>~+A;D~;VDs26>^oH0EI`IYT2iagy23?nyJ==i{g4%HrAf1-*v zK1)~@&(KkwR7TL}L(A@C_S0G;-GMDy=MJn2$FP5s<%wC)4jC5PXoxrQBFZ_k0P{{s@sz+gX`-!=T8rcB(=7vW}^K6oLWMmp(rwDh}b zwaGGd>yEy6fHv%jM$yJXo5oMAQ>c9j`**}F?MCry;T@47@r?&sKHgVe$MCqk#Z_3S z1GZI~nOEN*P~+UaFGnj{{Jo@16`(qVNtbU>O0Hf57-P>x8Jikp=`s8xWs^dAJ9lCQ z)GFm+=OV%AMVqVATtN@|vp61VVAHRn87}%PC^RAzJ%JngmZTasWBAWsoAqBU+8L8u z4A&Pe?fmTm0?mK-BL9t+{y7o(7jm+RpOhL9KnY#E&qu^}B6=K_dB}*VlSEiC9fn)+V=J;OnN)Ta5v66ic1rG+dGAJ1 z1%Zb_+!$=tQ~lxQrzv3x#CPb?CekEkA}0MYSgx$Jdd}q8+R=ma$|&1a#)TQ=l$1tQ z=tL9&_^vJ)Pk}EDO-va`UCT1m#Uty1{v^A3P~83_#v^ozH}6*9mIjIr;t3Uv%@VeW zGL6(CwCUp)Jq%G0bIG%?{_*Y#5IHf*5M@wPo6A{$Um++Co$wLC=J1aoG93&T7Ho}P 
z=mGEPP7GbvoG!uD$k(H3A$Z))+i{Hy?QHdk>3xSBXR0j!11O^mEe9RHmw!pvzv?Ua~2_l2Yh~_!s1qS`|0~0)YsbHSz8!mG)WiJE| z2f($6TQtt6L_f~ApQYQKSb=`053LgrQq7G@98#igV>y#i==-nEjQ!XNu9 z~;mE+gtj4IDDNQJ~JVk5Ux6&LCSFL!y=>79kE9=V}J7tD==Ga+IW zX)r7>VZ9dY=V&}DR))xUoV!u(Z|%3ciQi_2jl}3=$Agc(`RPb z8kEBpvY>1FGQ9W$n>Cq=DIpski};nE)`p3IUw1Oz0|wxll^)4dq3;CCY@RyJgFgc# zKouFh!`?Xuo{IMz^xi-h=StCis_M7yq$u) z?XHvw*HP0VgR+KR6wI)jEMX|ssqYvSf*_3W8zVTQzD?3>H!#>InzpSO)@SC8q*ii- z%%h}_#0{4JG;Jm`4zg};BPTGkYamx$Xo#O~lBirRY)q=5M45n{GCfV7h9qwyu1NxOMoP4)jjZMxmT|IQQh0U7C$EbnMN<3)Kk?fFHYq$d|ICu>KbY_hO zTZM+uKHe(cIZfEqyzyYSUBZa8;Fcut-GN!HSA9ius`ltNebF46ZX_BbZNU}}ZOm{M2&nANL9@0qvih15(|`S~z}m&h!u4x~(%MAO$jHRWNfuxWF#B)E&g3ghSQ9|> z(MFaLQj)NE0lowyjvg8z0#m6FIuKE9lDO~Glg}nSb7`~^&#(Lw{}GVOS>U)m8bF}x zVjbXljBm34Cs-yM6TVusr+3kYFjr28STT3g056y3cH5Tmge~ASxBj z%|yb>$eF;WgrcOZf569sDZOVwoo%8>XO>XQOX1OyN9I-SQgrm;U;+#3OI(zrWyow3 zk==|{lt2xrQ%FIXOTejR>;wv(Pb8u8}BUpx?yd(Abh6? zsoO3VYWkeLnF43&@*#MQ9-i-d0t*xN-UEyNKeyNMHw|A(k(_6QKO=nKMCxD(W(Yop zsRQ)QeL4X3Lxp^L%wzi2-WVSsf61dqliPUM7srDB?Wm6Lzn0&{*}|IsKQW;02(Y&| zaTKv|`U(pSzuvR6Rduu$wzK_W-Y-7>7s?G$)U}&uK;<>vU}^^ns@Z!p+9?St1s)dG zK%y6xkPyyS1$~&6v{kl?Md6gwM|>mt6Upm>oa8RLD^8T{0?HC!Z>;(Bob7el(DV6x zi`I)$&E&ngwFS@bi4^xFLAn`=fzTC;aimE^!cMI2n@Vo%Ae-ne`RF((&5y6xsjjAZ zVguVoQ?Z9uk$2ON;ersE%PU*xGO@T*;j1BO5#TuZKEf(mB7|g7pcEA=nYJ{s3vlbg zd4-DUlD{*6o%Gc^N!Nptgay>j6E5;3psI+C3Q!1ZIbeCubW%w4pq9)MSDyB{HLm|k zxv-{$$A*pS@csolri$Ge<4VZ}e~78JOL-EVyrbxKra^d{?|NnPp86!q>t<&IP07?Z z^>~IK^k#OEKgRH+LjllZXk7iA>2cfH6+(e&9ku5poo~6y{GC5>(bRK7hwjiurqAiZ zg*DmtgY}v83IjE&AbiWgMyFbaRUPZ{lYiz$U^&Zt2YjG<%m((&_JUbZcfJ22(>bi5 z!J?<7AySj0JZ&<-qXX;mcV!f~>G=sB0KnjWca4}vrtunD^1TrpfeS^4dvFr!65knK zZh`d;*VOkPs4*-9kL>$GP0`(M!j~B;#x?Ba~&s6CopvO86oM?-? 
zOw#dIRc;6A6T?B`Qp%^<U5 z19x(ywSH$_N+Io!6;e?`tWaM$`=Db!gzx|lQ${DG!zb1Zl&|{kX0y6xvO1o z220r<-oaS^^R2pEyY;=Qllqpmue|5yI~D|iI!IGt@iod{Opz@*ml^w2bNs)p`M(Io z|E;;m*Xpjd9l)4G#KaWfV(t8YUn@A;nK^#xgv=LtnArX|vWQVuw3}B${h+frU2>9^ z!l6)!Uo4`5k`<<;E(ido7M6lKTgWezNLq>U*=uz&s=cc$1%>VrAeOoUtA|T6gO4>UNqsdK=NF*8|~*sl&wI=x9-EGiq*aqV!(VVXA57 zw9*o6Ir8Lj1npUXvlevtn(_+^X5rzdR>#(}4YcB9O50q97%rW2me5_L=%ffYPUSRc z!vv?Kv>dH994Qi>U(a<0KF6NH5b16enCp+mw^Hb3Xs1^tThFpz!3QuN#}KBbww`(h z7GO)1olDqy6?T$()R7y%NYx*B0k_2IBiZ14&8|JPFxeMF{vW>HF-ViB*%t0;Thq2} z+qP}n=Cp0wwr%5S+qN<7?r+``=l(h0z2`^8j;g2~Q4u?{cIL{JYY%l|iw&YH4FL(8 z1-*E#ANDHi+1f%lMJbRfq*`nG)*#?EJEVoDH5XdfqwR-C{zmbQoh?E zhW!|TvYv~>R*OAnyZf@gC+=%}6N90yU@E;0b_OV#xL9B?GX(D&7BkujjFC@HVKFci zb_>I5e!yuHA1LC`xm&;wnn|3ht3h7|rDaOsh0ePhcg_^Wh8Bq|AGe`4t5Gk(9^F;M z8mFr{uCm{)Uq0Xa$Fw6+da`C4%)M_#jaX$xj;}&Lzc8wTc%r!Y#1akd|6FMf(a4I6 z`cQqS_{rm0iLnhMG~CfDZc96G3O=Tihnv8g;*w?)C4N4LE0m#H1?-P=4{KeC+o}8b zZX)x#(zEysFm$v9W8-4lkW%VJIjM~iQIVW)A*RCO{Oe_L;rQ3BmF*bhWa}!=wcu@# zaRWW{&7~V-e_$s)j!lJsa-J?z;54!;KnU3vuhp~(9KRU2GKYfPj{qA?;#}H5f$Wv-_ zGrTb(EAnpR0*pKft3a}6$npzzq{}ApC&=C&9KoM3Ge@24D^8ZWJDiXq@r{hP=-02& z@Qrn-cbr2YFc$7XR0j7{jAyR;4LLBf_XNSrmd{dV3;ae;fsEjds*2DZ&@#e)Qcc}w zLgkfW=9Kz|eeM$E`-+=jQSt}*kAwbMBn7AZSAjkHUn4n||NBq*|2QPcKaceA6m)g5 z_}3?DX>90X|35eI7?n+>f9+hl5b>#q`2+`FXbOu9Q94UX-GWH;d*dpmSFd~7WM#H2 zvKNxjOtC)U_tx*0(J)eAI8xAD8SvhZ+VRUA?)| zeJjvg9)vi`Qx;;1QP!c_6hJp1=J=*%!>ug}%O!CoSh-D_6LK0JyiY}rOaqSeja&jb#P|DR7 z_JannlfrFeaE$irfrRIiN|huXmQhQUN6VG*6`bzN4Z3!*G?FjN8!`ZTn6Wn4n=Ync z_|Sq=pO7+~{W2}599SfKz@umgRYj6LR9u0*BaHqdEw^i)dKo5HomT9zzB$I6w$r?6 zs2gu*wNOAMK`+5yPBIxSOJpL$@SN&iUaM zQ3%$EQt%zQBNd`+rl9R~utRDAH%7XP@2Z1s=)ks77I(>#FuwydE5>LzFx)8ye4ClM zb*e2i*E$Te%hTKh7`&rQXz;gvm4Dam(r-!FBEcw*b$U%Wo9DIPOwlC5Ywm3WRCM4{ zF42rnEbBzUP>o>MA){;KANhAW7=FKR=DKK&S1AqSxyP;k z;fp_GVuV}y6YqAd)5p=tJ~0KtaeRQv^nvO?*hZEK-qA;vuIo!}Xgec4QGW2ipf2HK z&G&ppF*1aC`C!FR9(j4&r|SHy74IiDky~3Ab)z@9r&vF+Bapx<{u~gb2?*J zSl{6YcZ$&m*X)X?|8<2S}WDrWN3yhyY7wlf*q`n^z3LT4T$@$y``b{m953kfBBPpQ7hT;zs(Nme`Qw@{_pUO0OG 
zfugi3N?l|jn-Du3Qn{Aa2#6w&qT+oof=YM!Zq~Xi`vlg<;^)Jreeb^x6_4HL-j}sU z1U^^;-WetwPLKMsdx4QZ$haq3)rA#ATpEh{NXto-tOXjCwO~nJ(Z9F%plZ{z(ZW!e zF>nv&4ViOTs58M+f+sGimF^9cB*9b(gAizwyu5|--SLmBOP-uftqVnVBd$f7YrkJ8!jm*QQEQC zEQ+@T*AA1kV@SPF6H5sT%^$$6!e5;#N((^=OA5t}bqIdqf`PiMMFEDhnV#AQWSfLp zX=|ZEsbLt8Sk&wegQU0&kMC|cuY`&@<#r{t2*sq2$%epiTVpJxWm#OPC^wo_4p++U zU|%XFYs+ZCS4JHSRaVET)jV?lbYAd4ouXx0Ka6*wIFBRgvBgmg$kTNQEvs0=2s^sU z_909)3`Ut!m}}@sv<63E@aQx}-!qVdOjSOnAXTh~MKvr$0nr(1Fj-3uS{U6-T9NG1Y(Ua)Nc}Mi< zOBQz^&^v*$BqmTIO^;r@kpaq3n!BI?L{#bw)pdFV&M?D0HKqC*YBxa;QD_4(RlawI z5wBK;7T^4dT7zt%%P<*-M~m?Et;S^tdNgQSn?4$mFvIHHL!`-@K~_Ar4vBnhy{xuy zigp!>UAwPyl!@~(bkOY;un&B~Evy@5#Y&cEmzGm+)L~4o4~|g0uu&9bh8N0`&{B2b zDj2>biRE1`iw}lv!rl$Smn(4Ob>j<{4dT^TfLe-`cm#S!w_9f;U)@aXWSU4}90LuR zVcbw;`2|6ra88#Cjf#u62xq?J)}I)_y{`@hzES(@mX~}cPWI8}SRoH-H;o~`>JWU$ zhLudK3ug%iS=xjv9tnmOdTXcq_?&o30O;(+VmC&p+%+pd_`V}RY4ibQMNE&N5O+hb3bQ8bxk^33Fu4DB2*~t1909gqoutQHx^plq~;@g$d_+rzS0`2;}2UR2h#?p35B=B*f0BZS4ysiWC!kw?4B-dM%m6_BfRbey1Wh? zT1!@>-y=U}^fxH0A`u1)Mz90G6-<4aW^a@l_9L6Y;cd$3<#xIrhup)XLkFi$W&Ohu z8_j~-VeVXDf9b&6aGelt$g*BzEHgzh)KDgII_Y zb$fcY8?XI6-GEGTZVWW%O;njZld)29a_&1QvNYJ@OpFrUH{er@mnh*}326TYAK7_Z zA={KnK_o3QLk|%m@bx3U#^tCChLxjPxMesOc5D4G+&mvp@Clicz^=kQlWp1|+z|V7 zkU#7l61m@^#`1`{+m2L{sZC#j?#>0)2z4}}kqGhB{NX%~+3{5jOyij!e$5-OAs zDvq+>I2(XsY9%NNhNvKiF<%!6t^7&k{L7~FLdkP9!h%=2Kt$bUt(Zwp*&xq_+nco5 zK#5RCM_@b4WBK*~$CsWj!N!3sF>ijS=~$}_iw@vbKaSp5Jfg89?peR@51M5}xwcHW z(@1TK_kq$c4lmyb=aX3-JORe+JmuNkPP=bM*B?};c=_;h2gT-nt#qbriPkpaqoF@q z<)!80iKvTu`T-B3VT%qKO^lfPQ#m5Ei6Y%Fs@%Pt!8yX&C#tL$=|Ma8i?*^9;}Fk> zyzdQQC5YTBO&gx6kB~yhUUT&%q3a3o+zueh>5D7tdByYVcMz@>j!C@Iyg{N1)veYl`SPshuH6Rk=O6pvVrI71rI5*%uU3u81DpD%qmXsbKWMFR@2m4vO_^l6MMbO9a()DcWmYT&?0B_ zuY~tDiQ6*X7;9B*5pj?;xy_B}*{G}LjW*qU&%*QAyt30@-@O&NQTARZ+%VScr>`s^KX;M!p; z?8)|}P}L_CbOn!u(A{c5?g{s31Kn#7i)U@+_KNU-ZyVD$H7rtOjSht8%N(ST-)%r` z63;Hyp^KIm-?D;E-EnpAAWgz2#z{fawTx_;MR7)O6X~*jm*VUkam7>ueT^@+Gb3-Y zN3@wZls8ibbpaoR2xH=$b3x1Ng5Tai=LT2@_P&4JuBQ!r#Py3ew!ZVH4~T!^TcdyC ze#^@k4a(nNe~G+y 
zI~yXK@1HHWU4pj{gWT6v@$c(x){cLq*KlFeKy?f$_u##)hDu0X_mwL6uKei~oPd9( zRaF_k&w(J3J8b_`F~?0(Ei_pH}U^c&r$uSYawB8Ybs-JZ|&;vKLWX! z|HFZ%-uBDaP*hMcQKf*|j5!b%H40SPD*#{A`kj|~esk@1?q}-O7WyAm3mD@-vHzw( zTSOlO(K9>GW;@?@xSwpk%X3Ui4_Psm;c*HF~RW+q+C#RO_VT5(x!5B#On-W`T|u z>>=t)W{=B-8wWZejxMaBC9sHzBZGv5uz_uu281kxHg2cll_sZBC&1AKD`CYh2vKeW zm#|MMdC}6A&^DX=>_(etx8f}9o}`(G?Y``M?D+aTPJbZqONmSs>y>WSbvs>7PE~cb zjO+1Y)PMi*!=06^$%< z*{b^66BIl{7zKvz^jut7ylDQBt)ba_F*$UkDgJ2gSNfHB6+`OEiz@xs$Tcrl>X4?o zu9~~b&Xl0?w(7lJXu8-9Yh6V|A3f?)1|~+u-q&6#YV`U2i?XIqUw*lc-QTXwuf@8d zSjMe1BhBKY`Mo{$s%Ce~Hv(^B{K%w{yndEtvyYjjbvFY^rn2>C1Lbi!3RV7F>&;zlSDSk}R>{twI}V zA~NK%T!z=^!qbw(OEgsmSj?#?GR&A$0&K>^(?^4iphc3rN_(xXA%joi)k~DmRLEXl zaWmwMolK%@YiyI|HvX{X$*Ei7y+zJ%m{b}$?N7_SN&p+FpeT%4Z_2`0CP=}Y3D-*@ zL|4W4ja#8*%SfkZzn5sfVknpJv&>glRk^oUqykedE8yCgIwCV)fC1iVwMr4hc#KcV!|M-r_N|nQWw@`j+0(Ywct~kLXQ)Qyncmi{Q4`Ur7A{Ep)n`zCtm8D zVX`kxa8Syc`g$6$($Qc-(_|LtQKWZXDrTir5s*pSVmGhk#dKJzCYT?vqA9}N9DGv> zw}N$byrt?Mk*ZZbN5&zb>pv;rU}EH@Rp54)vhZ=330bLvrKPEPu!WqR%yeM3LB!(E zw|J05Y!tajnZ9Ml*-aX&5T8YtuWDq@on)_*FMhz-?m|>RT0~e3OHllrEMthVY(KwQ zu>ijTc4>Xz-q1(g!ESjaZ+C+Zk5FgmF)rFX29_RmU!`7Pw+0}>8xK^=pOxtUDV)ok zw-=p=OvEH&VO3wToRdI!hPHc`qX+_{T_mj!NxcA&xOgkEuvz`-Aa`ZlNv>qnD0`YT1T3USO0ec!%{KE~UOGPJX%I5_rZDGx@|w zVIMsRPP+}^Xxa&{x!q{hY1wat8jDO7YP0(8xHWeEdrd79lUjB8%)v{X1pQu|1dr*y9M&a(J`038}4>lK&K zIM~6wnX{XA?pFHz{hOmEq{oYBnB@56twXqEcFrFqvCy)sH9B{pQ`G50o{W^t&onwY z-l{ur4#8ylPV5YRLD%%j^d0&_WI>0nmfZ8! 
zaZ&vo@7D`!=?215+Vk181*U@^{U>VyoXh2F&ZNzZx5tDDtlLc)gi2=|o=GC`uaH;< zFuuF?Q9Q`>S#c(~2p|s49RA`3242`2P+)F)t2N!CIrcl^0#gN@MLRDQ2W4S#MXZJO z8<(9P>MvW;rf2qZ$6sHxCVIr0B-gP?G{5jEDn%W#{T#2_&eIjvlVqm8J$*8A#n`5r zs6PuC!JuZJ@<8cFbbP{cRnIZs>B`?`rPWWL*A?1C3QqGEG?*&!*S0|DgB~`vo_xIo z&n_Sa(>6<$P7%Py{R<>n6Jy?3W|mYYoxe5h^b6C#+UoKJ(zl?^WcBn#|7wMI5=?S# zRgk8l-J`oM%GV&jFc)9&h#9mAyowg^v%Fc-7_^ou5$*YvELa!1q>4tHfX7&PCGqW* zu8In~5`Q5qQvMdToE$w+RP^_cIS2xJjghjCTp6Z(za_D<$S;0Xjt?mAE8~Ym{)zfb zV62v9|59XOvR}wEpm~Cnhyr`=JfC$*o15k?T`3s-ZqF6Gy;Gm+_6H$%oJPywWA^Wl zzn$L=N%{VT8DkQba0|2LqGR#O2Pw!b%LV4#Ojcx5`?Cm;+aLpkyZ=!r1z@E}V= z$2v6v%Ai)MMd`@IM&UD!%%(63VH8+m0Ebk<5Du#0=WeK(E<2~3@>8TceT$wy5F52n zRFtY>G9Gp~h#&R92{G{jLruZSNJ4)gNK+zg*$P zW@~Hf>_Do)tvfEAAMKE1nQ=8coTgog&S;wj(s?Xa0!r?UU5#2>18V#|tKvay1Ka53 zl$RxpMqrkv`Sv&#!_u8$8PMken`QL0_sD2)r&dZziefzSlAdKNKroVU;gRJE#o*}w zP_bO{F4g;|t!iroy^xf~(Q5qc8a3<+vBW%VIOQ1!??d;yEn1at1wpt}*n- z0iQtfu}Isw4ZfH~8p~#RQUKwf<$XeqUr-5?8TSqokdHL7tY|47R; z#d+4NS%Cqp>LQbvvAMIhcCX@|HozKXl)%*5o>P2ZegGuOerV&_MeA}|+o-3L!ZNJd z#1xB^(r!IfE~i>*5r{u;pIfCjhY^Oev$Y1MT16w8pJ0?9@&FH*`d;hS=c#F6fq z{mqsHd*xa;>Hg?j80MwZ%}anqc@&s&2v{vHQS68fueNi5Z(VD2eH>jmv4uvE|HEQm z^=b&?1R9?<@=kjtUfm*I!wPf5Xnma(4*DfPk}Es*H$%NGCIM1qt(LSvbl7&tV>e2$ zUqvZOTiwQyxDoxL(mn?n_x%Tre?L&!FYCOy0>o}#DTC3uSPnyGBv*}!*Yv5IV)Bg_t%V+UrTXfr!Q8+eX}ANR*YLzwme7Rl z@q_*fP7wP2AZ(3WG*)4Z(q@)~c{Je&7?w^?&Wy3)v0{TvNQRGle9mIG>$M2TtQ(Vf z3*PV@1mX)}beRTPjoG#&&IO#Mn(DLGp}mn)_0e=9kXDewC8Pk@yo<8@XZjFP-_zic z{mocvT9Eo)H4Oj$>1->^#DbbiJn^M4?v7XbK>co+v=7g$hE{#HoG6ZEat!s~I<^_s zlFee93KDSbJKlv_+GPfC6P8b>(;dlJ5r9&Pc4kC2uR(0{Kjf+SMeUktef``iXD}8` zGufkM9*Sx4>+5WcK#Vqm$g#5z1DUhc_#gLGe4_icSzN5GKr|J&eB)LS;jTXWA$?(k zy?*%U9Q#Y88(blIlxrtKp6^jksNF>-K1?8=pmYAPj?qq}yO5L>_s8CAv=LQMe3J6? 
zOfWD>Kx_5A4jRoIU}&aICTgdYMqC|45}St;@0~7>Af+uK3vps9D!9qD)1;Y6Fz>4^ zR1X$s{QNZl7l%}Zwo2wXP+Cj-K|^wqZW?)s1WUw_APZLhH55g{wNW3liInD)WHh${ zOz&K>sB*4inVY3m)3z8w!yUz+CKF%_-s2KVr7DpwTUuZjPS9k-em^;>H4*?*B0Bg7 zLy2nfU=ac5N}x1+Tlq^lkNmB~Dj+t&l#fO&%|7~2iw*N!*xBy+ZBQ>#g_;I*+J{W* z=@*15><)Bh9f>>dgQrEhkrr2FEJ;R2rH%`kda8sD-FY6e#7S-<)V*zQA>)Ps)L- zgUuu@5;Ych#jX_KZ+;qEJJbu{_Z9WSsLSo#XqLpCK$gFidk}gddW(9$v}iyGm_OoH ztn$pv81zROq686_7@avq2heXZnkRi4n(3{5jTDO?9iP%u8S4KEqGL?^uBeg(-ws#1 z9!!Y_2Q~D?gCL3MQZO!n$+Wy(Twr5AS3{F7ak2f)Bu0iG^k^x??0}b6l!>Vjp{e*F z8r*(Y?3ZDDoS1G?lz#J4`d9jAEc9YGq1LbpYoFl!W!(j8-33Ey)@yx+BVpDIVyvpZ zq5QgKy>P}LlV?Bgy@I)JvefCG)I69H1;q@{8E8Ytw^s-rC7m5>Q>ZO(`$`9@`49s2)q#{2eN0A?~qS8%wxh%P*99h*Sv` zW_z3<=iRZBQKaDsKw^TfN;6`mRck|6Yt&e$R~tMA0ix;qgw$n~fe=62aG2v0S`7mU zI}gR#W)f+Gn=e3mm*F^r^tcv&S`Rym`X`6K`i8g-a0!p|#69@Bl!*&)QJ9(E7ycxz z)5-m9v`~$N1zszFi^=m%vw}Y{ZyYub!-6^KIY@mwF|W+|t~bZ%@rifEZ-28I@s$C` z>E+k~R1JC-M>8iC_GR>V9f9+uL2wPRATL9bC(sxd;AMJ>v6c#PcG|Xx1N5^1>ISd0 z4%vf-SNOw+1%yQq1YP`>iqq>5Q590_pr?OxS|HbLjx=9~Y)QO37RihG%JrJ^=Nj>g zPTcO$6r{jdE_096b&L;Wm8vcxUVxF0mA%W`aZz4n6XtvOi($ zaL!{WUCh&{5ar=>u)!mit|&EkGY$|YG<_)ZD)I32uEIWwu`R-_ z`FVeKyrx3>8Ep#2~%VVrQ%u#exo!anPe`bc)-M=^IP1n1?L2UQ@# zpNjoq-0+XCfqXS!LwMgFvG$PkX}5^6yxW)6%`S8{r~BA2-c%-u5SE#%mQ~5JQ=o$c z%+qa0udVq9`|=2n=0k#M=yiEh_vp?(tB|{J{EhVLPM^S@f-O*Lgb390BvwK7{wfdMKqUc0uIXKj5>g^z z#2`5^)>T73Eci+=E4n&jl42E@VYF2*UDiWLUOgF#p9`E4&-A#MJLUa&^hB@g7KL+n zr_bz+kfCcLIlAevILckIq~RCwh6dc5@%yN@#f3lhHIx4fZ_yT~o0#3@h#!HCN(rHHC6#0$+1AMq?bY~(3nn{o5g8{*e_#4RhW)xPmK zTYBEntuYd)`?`bzDksI9*MG$=^w!iiIcWg1lD&kM1NF@qKha0fDVz^W7JCam^!AQFxY@7*`a3tfBwN0uK_~YBQ18@^i%=YB}K0Iq(Q3 z=7hNZ#!N@YErE7{T|{kjVFZ+f9Hn($zih;f&q^wO)PJSF`K)|LdT>!^JLf=zXG>>G z15TmM=X`1%Ynk&dvu$Vic!XyFC(c=qM33v&SIl|p+z6Ah9(XQ0CWE^N-LgE#WF6Z+ zb_v`7^Rz8%KKg_@B>5*s-q*TVwu~MCRiXvVx&_3#r1h&L+{rM&-H6 zrcgH@I>0eY8WBX#Qj}Vml+fpv?;EQXBbD0lx%L?E4)b-nvrmMQS^}p_CI3M24IK(f| zV?tWzkaJXH87MBz^HyVKT&oHB;A4DRhZy;fIC-TlvECK)nu4-3s7qJfF-ZZGt7+6C3xZt!ZX4`M{eN|q!y*d^B+cF5W- 
zc9C|FzL;$bAfh56fg&y0j!PF8mjBV!qA=z$=~r-orU-{0AcQUt4 zNYC=_9(MOWe$Br9_50i#0z!*a1>U6ZvH>JYS9U$kkrCt7!mEUJR$W#Jt5vT?U&LCD zd@)kn%y|rkV|CijnZ((B2=j_rB;`b}F9+E1T46sg_aOPp+&*W~44r9t3AI}z)yUFJ z+}z5E6|oq+oPC3Jli)EPh9)o^B4KUYkk~AU9!g`OvC`a!#Q>JmDiMLTx>96_iDD9h@nW%Je4%>URwYM%5YU1&Dcdulvv3IH3GSrA4$)QjlGwUt6 zsR6+PnyJ$1x{|R=ogzErr~U|X!+b+F8=6y?Yi`E$yjWXsdmxZa^hIqa)YV9ubUqOj&IGY}bk zH4*DEn({py@MG5LQCI;J#6+98GaZYGW-K-&C`(r5#?R0Z){DlY8ZZk}lIi$xG}Q@2 z0LJhzuus-7dLAEpG1Lf+KOxn&NSwO{wn_~e0=}dovX)T(|WRMTqacoW8;A>8tTDr+0yRa+U!LW z!H#Gnf^iCy$tTk3kBBC=r@xhskjf1}NOkEEM4*r+A4`yNAIjz`_JMUI#xTf$+{UA7 zpBO_aJkKz)iaKqRA{8a6AtpdUwtc#Y-hxtZnWz~i(sfjMk`lq|kGea=`62V6y)TMPZw8q}tFDDHrW_n(Z84ZxWvRrntcw;F|Mv4ff9iaM% z4IM{=*zw}vIpbg=9%w&v`sA+a3UV@Rpn<6`c&5h+8a7izP>E@7CSsCv*AAvd-izwU z!sGJQ?fpCbt+LK`6m2Z3&cKtgcElAl){*m0b^0U#n<7?`8ktdIe#ytZTvaZy728o6 z3GDmw=vhh*U#hCo0gb9s#V5(IILXkw>(6a?BFdIb0%3~Y*5FiMh&JWHd2n(|y@?F8 zL$%!)uFu&n+1(6)oW6Hx*?{d~y zBeR)N*Z{7*gMlhMOad#k4gf`37OzEJ&pH?h!Z4#mNNCfnDI@LbiU~&2Gd^q7ix8~Y6$a=B9bK(BaTEO0$Oh=VCkBPwt0 zf#QuB25&2!m7MWY5xV_~sf(0|Y*#Wf8+FQI(sl2wgdM5H7V{aH6|ntE+OcLsTC`u; zeyrlkJgzdIb5=n#SCH)+kjN)rYW7=rppN3Eb;q_^8Zi}6jtL@eZ2XO^w{mCwX(q!t ztM^`%`ndZ5c+2@?p>R*dDNeVk#v>rsn>vEo;cP2Ecp=@E>A#n0!jZACKZ1=D0`f|{ zZnF;Ocp;$j86m}Gt~N+Ch6CJo7+Wzv|nlsXBvm z?St-5Ke&6hbGAWoO!Z2Rd8ARJhOY|a1rm*sOif%Th`*=^jlgWo%e9`3sS51n*>+Mh(9C7g@*mE|r%h*3k6I_uo;C!N z7CVMIX4kbA#gPZf_0%m18+BVeS4?D;U$QC`TT;X zP#H}tMsa=zS6N7n#BA$Fy8#R7vOesiCLM@d1UO6Tsnwv^gb}Q9I}ZQLI?--C8ok&S z9Idy06+V(_aj?M78-*vYBu|AaJ9mlEJpFEIP}{tRwm?G{ag>6u(ReBKAAx zDR6qe!3G88NQP$i99DZ~CW9lzz}iGynvGA4!yL}_9t`l*SZbEL-%N{n$%JgpDHJRn zvh<{AqR7z@ylV`kXdk+uEu-WWAt^=A4n(J=A1e8DpeLzAd;Nl#qlmp#KcHU!8`YJY zvBZy@>WiBZpx*wQ8JzKw?@k}8l99Wo&H>__vCFL}>m~MTmGvae% zPTn9?iR=@7NJ)?e+n-4kx$V#qS4tLpVUX*Je0@`f5LICdxLnph&Vjbxd*|+PbzS(l zBqqMlUeNoo8wL&_HKnM^8{iDI3IdzJAt32UupSr6XXh9KH2LjWD)Pz+`cmps%eHeD zU%i1SbPuSddp6?th;;DfUlxYnjRpd~i7vQ4V`cD%4+a9*!{+#QRBr5^Q$5Ec?gpju zv@dk9;G>d7QNEdRy}fgeA?i=~KFeibDtYffy)^OP?Ro~-X!onDpm+uGpe&6)*f@xJ 
zE1I3Qh}`1<7aFB@TS#}ee={<#9%1wOL%cuvOd($y4MC2?`1Nin=pVLXPkknn*0kx> z!9XHW${hYEV;r6F#iz7W=fg|a@GY0UG5>>9>$3Bj5@!N{nWDD`;JOdz_ZaZVVIUgH zo+<=+n8VGL*U%M|J$A~#ll__<`y+jL>bv;TpC!&|d=q%E2B|5p=)b-Q+ZrFO%+D_u z4%rc8BmOAO6{n(i(802yZW93?U;K^ZZlo0Gvs7B+<%}R;$%O}pe*Gi;!xP-M73W`k zXLv473Ex_VPcM-M^JO|H>KD;!sEGJ|E}Qepen;yNG2 zXqgD5sjQUDI(XLM+^8ZX1s_(X+PeyQ$Q5RukRt|Kwr-FSnW!^9?OG64UYX1^bU9d8 zJ}8K&UEYG+Je^cThf8W*^RqG07nSCmp*o5Z;#F zS?jochDWX@p+%CZ%dOKUl}q{9)^U@}qkQtA3zBF)`I&zyIKgb{mv)KtZ}?_h{r#VZ z%C+hwv&nB?we0^H+H`OKGw-&8FaF;=ei!tAclS5Q?qH9J$nt+YxdKkbRFLnWvn7GH zezC6<{mK0dd763JlLFqy&Oe|7UXII;K&2pye~yG4jldY~N;M9&rX}m76NsP=R#FEw zt(9h+=m9^zfl=6pH*D;JP~OVgbJkXh(+2MO_^;%F{V@pc2nGn~=U)Qx|JEV-e=vXk zPxA2J<9~IH{}29#X~KW$(1reJv}lc4_1JF31gdev>!CddVhf_62nsr6%w)?IWxz}{ z(}~~@w>c07!r=FZANq4R!F2Qi2?QGavZ{)PCq~X}3x;4ylsd&m;dQe;0GFSn5 zZ*J<=Xg1fEGYYDZ0{Z4}Jh*xlXa}@412nlKSM#@wjMM z*0(k>Gfd1Mj)smUuX}EM6m)811%n5zzr}T?$ZzH~*3b`3q3gHSpA<3cbzTeRDi`SA zT{O)l3%bH(CN0EEF9ph1(Osw5y$SJolG&Db~uL!I3U{X`h(h%^KsL71`2B1Yn z7(xI+Fk?|xS_Y5)x?oqk$xmjG@_+JdErI(q95~UBTvOXTQaJs?lgrC6Wa@d0%O0cC zzvslIeWMo0|C0({iEWX{=5F)t4Z*`rh@-t0ZTMse3VaJ`5`1zeUK0~F^KRY zj2z-gr%sR<(u0@SNEp%Lj38AB2v-+cd<8pKdtRU&8t3eYH#h7qH%bvKup4cnnrN>l z!5fve)~Y5_U9US`uXDFoOtx2gI&Z!t&VPIoqiv>&H(&1;J9b}kZhcOX7EiW*Bujy#MaCl52%NO-l|@2$aRKvZ!YjwpXwC#nA(tJtd1p?jx&U|?&jcb!0MT6oBlWurVRyiSCX?sN3j}d zh3==XK$^*8#zr+U^wk(UkF}bta4bKVgr`elH^az{w(m}3%23;y7dsEnH*pp{HW$Uk zV9J^I9ea7vp_A}0F8qF{>|rj`CeHZ?lf%HImvEJF<@7cgc1Tw%vAUA47{Qe(sP^5M zT=z<~l%*ZjJvObcWtlN?0$b%NdAj&l`Cr|x((dFs-njsj9%IIqoN|Q?tYtJYlRNIu zY(LtC-F14)Og*_V@gjGH^tLV4uN?f^#=dscCFV~a`r8_o?$gj3HrSk=YK2k^UW)sJ z&=a&&JkMkWshp0sto$c6j8f$J!Bsn*MTjC`3cv@l@7cINa!}fNcu(0XF7ZCAYbX|WJIL$iGx8l zGFFQsw}x|i!jOZIaP{@sw0BrV5Z5u!TGe@JGTzvH$}55Gf<;rieZlz+6E1}z_o3m2 z(t;Cp^Geen7iSt)ZVtC`+tzuv^<6--M`^5JXBeeLXV)>2;f7=l%(-4?+<5~;@=Th{1#>rK3+rLn(44TAFS@u(}dunUSYu}~))W*fr` zkBL}3k_@a4pXJ#u*_N|e#1gTqxE&WPsfDa=`@LL?PRR()9^HxG?~^SNmeO#^-5tMw zeGEW&CuX(Uz#-wZOEt8MmF}hQc%14L)0=ebo`e$$G6nVrb)afh!>+Nfa5P;N 
zCCOQ^NRel#saUVt$Ds0rGd%gkKP2LsQRxq6)g*`-r(FGM!Q51c|9lk!ha8Um3ys1{ zWpT7XDWYshQ{_F!8D8@3hvXhQDw;GlkUOzni&T1>^uD){WH3wRONgjh$u4u7?+$(Y zqTXEF>1aPNZCXP0nJ;zs6_%6;+D&J_|ugcih**y(4ApT`RKAi5>SZe0Bz|+l7z>P14>0ljIH*LhK z@}2O#{?1RNa&!~sEPBvIkm-uIt^Pt#%JnsbJ`-T0%pb ze}d;dzJFu7oQ=i`VHNt%Sv@?7$*oO`Rt*bRNhXh{FArB`9#f%ksG%q?Z`_<19;dBW z5pIoIo-JIK9N$IE1)g8@+4}_`sE7;Lus&WNAJ^H&=4rGjeAJP%Dw!tn*koQ&PrNZw zY88=H7qpHz11f}oTD!0lWO>pMI;i4sauS`%_!zM!n@91sLH#rz1~iEAu#1b%LA zhB}7{1(8{1{V8+SEs=*f=FcRE^;`6Pxm$Hie~|aD~W1BYy#@Y$C?pxJh*cC!T@8C9{xx*T*8P zhbkRk3*6)Zbk%}u>^?ItOhxdmX$j9KyoxxN>NrYGKMkLF4*fLsL_PRjHNNHCyaUHN z7W8yEhf&ag07fc9FD>B{t0#Civsoy0hvVepDREX(NK1LbK0n*>UJp&1FygZMg7T^G z(02BS)g#qMOI{RJIh7}pGNS8WhSH@kG+4n=(8j<+gVfTur)s*hYus70AHUBS2bN6Zp_GOHYxsbg{-Rcet{@0gzE`t$M0_!ZIqSAIW53j+Ln7N~8J zLZ0DOUjp^j`MvX#hq5dFixo^1szoQ=FTqa|@m>9F@%>7OuF9&_C_MDco&-{wfLKNrDMEN4pRUS8-SD6@GP`>_7$;r>dJo>KbeXm>GfQS? zjFS+Y6^%pDCaI0?9(z^ELsAE1`WhbhNv5DJ$Y}~r;>FynHjmjmA{bfDbseZXsKUv`%Fekv)1@f%7ti;B5hhs}5db1dP+P0${1DgKtb(DvN}6H6;0*LP6blg*rpr;Z(7? zrve>M`x6ZI(wtQc4%lO?v5vr{0iTPl&JT!@k-7qUN8b$O9YuItu7zrQ*$?xJIN#~b z#@z|*5z&D7g5>!o(^v+3N?JnJns5O2W4EkF>re*q1uVjgT#6ROP5>Ho)XTJoHDNRC zuLC(Cd_ZM?FAFPoMw;3FM4Ln0=!+vgTYBx2TdXpM@EhDCorzTS6@2`swp4J^9C0)U zq?)H8)=D;i+H`EVYge>kPy8d*AxKl};iumYu^UeM+e_3>O+LY`D4?pD%;Vextj!(; zomJ(u+dR(0m>+-61HTV7!>03vqozyo@uY@Zh^KrW`w7^ENCYh86_P2VC|4}(ilMBe zwa&B|1a7%Qkd>d14}2*_yYr@8-N}^&?LfSwr)C~UUHr)ydENu=?ZHkvoLS~xTiBH= zD%A=OdoC+10l7@rXif~Z#^AvW+4M-(KQBj=Nhgts)>xmA--IJf1jSZF6>@Ns&nmv} zXRk`|`@P5_9W4O-SI|f^DCZ-n*yX@2gf6N)epc~lRWl7QgCyXdx|zr^gy>q`Vwn^y z&r3_zS}N=HmrVtTZhAQS`3$kBmVZDqr4+o(oNok?tqel9kn3;uUerFRti=k+&W{bb zT{ZtEf51Qf+|Jc*@(nyn#U+nr1SFpu4(I7<1a=)M_yPUAcKVF+(vK!|DTL2;P)yG~ zrI*7V)wN_92cM)j`PtAOFz_dO)jIfTeawh2{d@x0nd^#?pDkBTBzr0Oxgmvjt`U^$ zcTPl=iwuen=;7ExMVh7LLFSKUrTiPJpMB&*Ml32>wl} zYn(H0N4+>MCrm2BC4p{meYPafDEXd4yf$i%ylWpC|9%R4XZBUQiha(x%wgQ5iJ?K_wQBRfw z+pYuKoIameAWV7Ex4$PCd>bYD7)A9J`ri&bwTRN*w~7DR0EeLXW|I2()Zkl6vxiw? 
zFBX){0zT@w_4YUT4~@TXa;nPb^Tu$DJ=vluc~9)mZ}uHd#4*V_eS7)^eZ9oI%Wws_ z`;97^W|?_Z6xHSsE!3EKHPN<3IZ^jTJW=Il{rMmlnR#OuoE6dqOO1KOMpW84ZtDHNn)(pYvs=frO`$X}sY zKY0At$G85&2>B|-{*+B*aqQn&Mqjt*DVH2kdwEm5f}~Xwn9+tPt?EPwh8=8=VWA8rjt*bHEs1FJ92QohQ)Y z4sQH~AzB5!Pisyf?pVa0?L4gthx2;SKlrr?XRU`?Y>RJgUeJn!az#sNF7oDbzksrD zw8)f=f1t*UK&$}_ktf!yf4Rjt{56ffTA{A=9n})E7~iXaQkE+%GW4zqbmlYF(|hE@ z421q9`UQf$uA5yDLx67`=EnSTxdEaG!6C%9_obpb?;u-^QFX% zU1wQ}Li{PeT^fS;&Sk2#$ZM#Zpxrn7jsd<@qhfWy*H)cw9q!I9!fDOCw~4zg zbW`EHsTp9IQUCETUse)!ZmuRICx}0Oe1KVoqdK+u>67A8v`*X*!*_i5`_qTzYRkbYXg#4vT5~A{lK#bA}Oc4ePu5hr-@;i%Z!4Y;-(yR z(1rHYTc7i1h1aipP4DaIY3g2kF#MX{XW7g&zL!39ohO98=eo5nZtq+nz}2E$OZpxx z&OFaOM1O;?mxq+`%k>YS!-=H7BB&WhqSTUC{S!x*k9E zcB;u0I!h%3nEchQwu1GnNkaQxuWnW0D@Xq5j@5WE@E(WlgDU;FLsT*eV|Bh)aH0;~@^yygFj<=+Vu3p)LlF%1AA%y5z-Oh`2 z$RDKk_6r+f#I`8fQ%y#Wx%~de1qkWL2(q^~veLKwht-dIcpt(@lc>`~@mISRIPKPm zD!Za&aX@7dy*CT!&Z7JC1jP2@8+ro8SmlH>_gzRte%ojgiwfd?TR+%Ny0`sp`QRLy zl5TiQkFhIC!2aaJ&=Ua`c9UuOk9GkSFZ}!IGeMZ5MXrL zGtMj`m{(X9+l%=d|L zW2OY?8!_pyhvJ1@O!Chsf6}@3HmKq@)x;CFItPMpkSr@npO&8zMc_O?*|sqkuL^U? 
zV9+x3vbr|6;Ft0J^J>IH_xpa<{S5K?u-sQWC7FB9YFMwoCKK3WZ*gvO-wAApF`K%#7@1 z^sEj4*%hH`f0@sRDGI|#Dl20o$Z*gttP$q(_?#~2!H9(!d=)I93-3)?e%@$1^*F=t9t&OQ9!p84Z`+y<$yQ9wlamK~Hz2CRpS8dWJfBl@(M2qX!9d_F= zd|4A&U~8dX^M25wyC7$Swa22$G61V;fl{%Q4Lh!t_#=SP(sr_pvQ=wqOi`R)do~QX zk*_gsy75$xoi5XE&h7;-xVECk;DLoO0lJ3|6(Ba~ezi73_SYdCZPItS5MKaGE_1My zdQpx?h&RuoQ7I=UY{2Qf ziGQ-FpR%piffR_4X{74~>Q!=i`)J@T415!{8e`AXy`J#ZK)5WWm3oH?x1PVvcAqE@ zWI|DEUgxyN({@Y99vCJVwiGyx@9)y2jNg`R{$s2o;`4!^6nDX_pb~fTuzf>ZoPV@X zXKe1ehcZ+3dxCB+vikgKz8pvH?>ZzlOEObd{(-aWY;F0XIbuIjSA+!%TNy87a>BoX zsae$}Fcw&+)z@n{Fvzo;SkAw0U*}?unSO)^-+sbpNRjD8&qyfp%GNH;YKdHlz^)4( z;n%`#2Pw&DPA8tc)R9FW7EBR3?GDWhf@0(u3G4ijQV;{qp3B)`Fd}kMV}gB2U%4Sy z3x>YU&`V^PU$xWc4J!OG{Jglti@E3rdYo62K31iu!BU&pdo}S66Ctq{NB<88P92Y9 zTOqX$h6HH_8fKH(I>MEJZl1_2GB~xI+!|BLvN;CnQrjHuh?grzUO7h;1AbzLi|_O= z2S=(0tX#nBjN92gRsv;7`rDCATA!o(ZA}6)+;g;T#+1~HXGFD1@3D#|Ky9!E@)u=h z3@zg3Us0BCYmq(pB`^QTp|RB9!lX*{;7r|Z(^>J+av(0-oUmIdR78c4(q%hP#=R@W ze{;yy$T^8kXr(oC*#NQMZSQlgU)aa=BrZDwpLUk5tm&(AkNt&Gel`=ydcL*<@Ypx{ z2uOxl>2vSY2g3%Si&JU<9D5#{_z{9PzJh=miNH;STk^;5#%8iMRfPe#G~T>^U_zt? 
zgSE)`UQhb!G$at%yCf5MU)<&(L73(hY3*%qqPbX;`%QDHed3ZaWw^k)8Vjd#ePg@;I&pMe+A18k+S+bou|QX?8eQ`{P-0vrm=uR;Y(bHV>d>Gen4LHILqcm_ z3peDMRE3JMA8wWgPkSthI^K<|8aal38qvIcEgLjHAFB0P#IfqP2y}L>=8eBR}Fm^V*mw2Q4+o=exP@*#=Zs zIqHh@neG)Vy%v4cB1!L}w9J>IqAo}CsqbFPrUVc@;~Ld7t_2IIG=15mT7Itrjq#2~ zqX*&nwZP>vso$6W!#` z-YZ}jhBwQku-Qc>TIMpn%_z~`^u4v3Skyf)KA}V{`dr!Q;3xK1TuGYdl}$sKF^9X!*a-R*Oq1#tLq!W)gO}{q`1HM;oh1-k4FU@8W(qe>P05$+ z`ud2&;4IW4vq8#2yA{G>OH=G+pS_jctJ*BqD$j-MI#avR+<>m-`H1@{3VgKYn2_Ih z0`2_1qUMRuzgj_V^*;5Ax_0s{_3tYR>|$i#c!F7)#`oVGmsD*M2?%930cBSI4Mj>P zTm&JmUrvDXlB%zeA_7$&ogjGK3>SOlV$ct{4)P0k)Kua%*fx9?)_fkvz<(G=F`KCp zE`0j*=FzH$^Y@iUI}MM2Hf#Yr@oQdlJMB5xe0$aGNk%tgex;0)NEuVYtLEvOt{}ti zL`o$K9HnnUnl*;DTGTNiwr&ydfDp@3Y)g5$pcY9l1-9g;yn6SBr_S9MV8Xl+RWgwb zXL%kZLE4#4rUO(Pj484!=`jy74tQxD0Zg>99vvQ}R$7~GW)-0DVJR@$5}drsp3IQG zlrJL}M{+SdWbrO@+g2BY^a}0VdQtuoml`jJ2s6GsG5D@(^$5pMi3$27psEIOe^n=*Nj|Ug7VXN0OrwMrRq&@sR&vdnsRlI%*$vfmJ~)s z^?lstAT$Ked`b&UZ@A6I<(uCHGZ9pLqNhD_g-kj*Sa#0%(=8j}4zd;@!o;#vJ+Bsd z4&K4RIP>6It9Ir)ey?M6Gi6@JzKNg;=jM=$)gs2#u_WhvuTRwm1x2^*!e%l&j02xz zYInQgI$_V7Epzf3*BU~gos}|EurFj8l}hsI(!5yX!~ECL%cnYMS-e<`AKDL%(G)62 zPU;uF1(~(YbH2444JGh58coXT>(*CdEwaFuyvB|%CULgVQesH$ znB`vk3BMP<-QauWOZ0W6xB5y7?tE5cisG|V;bhY^8+*BH1T0ZLbn&gi12|a9Oa%;I zxvaxX_xe3@ng%;4C?zPHQ1v%dbhjA6Sl7w<*)Nr#F{Ahzj}%n9c&!g5HVrlvUO&R2C)_$x6M9 zahficAbeHL2%jILO>Pq&RPPxl;i{K5#O*Yt15AORTCvkjNfJ)LrN4K{sY7>tGuTQ@ z^?N*+xssG&sfp0c$^vV*H)U1O!fTHk8;Q7@42MT@z6UTd^&DKSxVcC-1OLjl7m63& zBb&goU!hes(GF^yc!107bkV6Pr%;A-WWd@DK2;&=zyiK*0i^0@f?fh2c)4&DRSjrI zk!W^=l^JKlPW9US{*yo?_XT@T2Bx+Cm^+r{*5LVcKVw*ll3+)lkebA-4)o z8f5xHWOx0!FDSs4nv@o@>mxTQrOeKzj@5uL`d>mXSp|#{FE54EE_!KtQNq>-G(&5) ztz?xkqPU16A-8@-quJ|SU^ClZ?bJ2kCJPB|6L>NTDYBprw$WcwCH{B z5qlJ6wK_9sT@Kl6G|Q&$gsl@WT>hE;nDAbH#%f1ZwuOkvWLj{qV$m3LF423&l!^iV zhym*>R>Yyens++~6F5+uZQTCz9t~PEW+e?w)XF2g!^^%6k?@Jcu;MG0FG9!T+Gx{Z zK;31y@(J{!-$k4E{5#Sv(2DGy3EZQY}G_*z*G&CZ_J?m&Fg4IBrvPx1w z1zAb3k}6nT?E)HNCi%}aR^?)%w-DcpBR*tD(r_c{QU6V&2vU-j0;{TVDN6los%YJZ 
z5C(*ZE#kv-BvlGLDf9>EO#RH_jtolA)iRJ>tSfJpF!#DO+tk% zBAKCwVZwO^p)(Rhk2en$XLfWjQQ`ix>K}Ru6-sn8Ih6k&$$y`zQ}}4dj~o@9gX9_= z#~EkchJqd5$**l}~~6mOl(q#GMIcFg&XCKO;$w>!K14 zko1egAORiG{r|8qj*FsN>?7d`han?*MD#xe^)sOqj;o;hgdaVnBH$BM{_73?znS+R z*G2VHM!Jw6#<FfJ-J%-9AuDW$@mc-Eyk~F{Jbvt` zn;(%DbBDnKIYr~|I>ZTvbH@cxUyw%bp*)OSs}lwO^HTJ2M#u5QsPF0?Jv*OVPfdKv z+t$Z5P!~jzZ~Y!d#iP?S{?M_g%Ua0Q)WawbIx+2uYpcf(7Im%W=rAu4dSceo7RZh# zN38=RmwOJQE$qbPXIuO^E`wSeJKCx3Q76irp~QS#19dusEVCWPrKhK9{7cbIMg9U} TZiJi*F`$tkWLn) literal 63721 zcmb5Wb9gP!wgnp7wrv|bwr$&XvSZt}Z6`anZSUAlc9NHKf9JdJ;NJVr`=eI(_pMp0 zy1VAAG3FfAOI`{X1O)&90s;U4K;XLp008~hCjbEC_fbYfS%6kTR+JtXK>nW$ZR+`W ze|#J8f4A@M|F5BpfUJb5h>|j$jOe}0oE!`Zf6fM>CR?!y@zU(cL8NsKk`a z6tx5mAkdjD;J=LcJ;;Aw8p!v#ouk>mUDZF@ zK>yvw%+bKu+T{Nk@LZ;zkYy0HBKw06_IWcMHo*0HKpTsEFZhn5qCHH9j z)|XpN&{`!0a>Vl+PmdQc)Yg4A(AG-z!+@Q#eHr&g<9D?7E)_aEB?s_rx>UE9TUq|? z;(ggJt>9l?C|zoO@5)tu?EV0x_7T17q4fF-q3{yZ^ipUbKcRZ4Qftd!xO(#UGhb2y>?*@{xq%`(-`2T^vc=#< zx!+@4pRdk&*1ht2OWk^Z5IAQ0YTAXLkL{(D*$gENaD)7A%^XXrCchN&z2x+*>o2FwPFjWpeaL=!tzv#JOW#( z$B)Nel<+$bkH1KZv3&-}=SiG~w2sbDbAWarg%5>YbC|}*d9hBjBkR(@tyM0T)FO$# zPtRXukGPnOd)~z=?avu+4Co@wF}1T)-uh5jI<1$HLtyDrVak{gw`mcH@Q-@wg{v^c zRzu}hMKFHV<8w}o*yg6p@Sq%=gkd~;`_VGTS?L@yVu`xuGy+dH6YOwcP6ZE`_0rK% zAx5!FjDuss`FQ3eF|mhrWkjux(Pny^k$u_)dyCSEbAsecHsq#8B3n3kDU(zW5yE|( zgc>sFQywFj5}U*qtF9Y(bi*;>B7WJykcAXF86@)z|0-Vm@jt!EPoLA6>r)?@DIobIZ5Sx zsc@OC{b|3%vaMbyeM|O^UxEYlEMHK4r)V-{r)_yz`w1*xV0|lh-LQOP`OP`Pk1aW( z8DSlGN>Ts|n*xj+%If~+E_BxK)~5T#w6Q1WEKt{!Xtbd`J;`2a>8boRo;7u2M&iOop4qcy<)z023=oghSFV zST;?S;ye+dRQe>ygiJ6HCv4;~3DHtJ({fWeE~$H@mKn@Oh6Z(_sO>01JwH5oA4nvK zr5Sr^g+LC zLt(i&ecdmqsIJGNOSUyUpglvhhrY8lGkzO=0USEKNL%8zHshS>Qziu|`eyWP^5xL4 zRP122_dCJl>hZc~?58w~>`P_s18VoU|7(|Eit0-lZRgLTZKNq5{k zE?V=`7=R&ro(X%LTS*f+#H-mGo_j3dm@F_krAYegDLk6UV{`UKE;{YSsn$ z(yz{v1@p|p!0>g04!eRSrSVb>MQYPr8_MA|MpoGzqyd*$@4j|)cD_%^Hrd>SorF>@ zBX+V<@vEB5PRLGR(uP9&U&5=(HVc?6B58NJT_igiAH*q~Wb`dDZpJSKfy5#Aag4IX zj~uv74EQ_Q_1qaXWI!7Vf@ZrdUhZFE;L&P_Xr8l@GMkhc#=plV0+g(ki>+7fO%?Jb 
zl+bTy7q{w^pTb{>(Xf2q1BVdq?#f=!geqssXp z4pMu*q;iiHmA*IjOj4`4S&|8@gSw*^{|PT}Aw~}ZXU`6=vZB=GGeMm}V6W46|pU&58~P+?LUs%n@J}CSrICkeng6YJ^M? zS(W?K4nOtoBe4tvBXs@@`i?4G$S2W&;$z8VBSM;Mn9 zxcaEiQ9=vS|bIJ>*tf9AH~m&U%2+Dim<)E=}KORp+cZ^!@wI`h1NVBXu{@%hB2Cq(dXx_aQ9x3mr*fwL5!ZryQqi|KFJuzvP zK1)nrKZ7U+B{1ZmJub?4)Ln^J6k!i0t~VO#=q1{?T)%OV?MN}k5M{}vjyZu#M0_*u z8jwZKJ#Df~1jcLXZL7bnCEhB6IzQZ-GcoQJ!16I*39iazoVGugcKA{lhiHg4Ta2fD zk1Utyc5%QzZ$s3;p0N+N8VX{sd!~l*Ta3|t>lhI&G`sr6L~G5Lul`>m z{!^INm?J|&7X=;{XveF!(b*=?9NAp4y&r&N3(GKcW4rS(Ejk|Lzs1PrxPI_owB-`H zg3(Rruh^&)`TKA6+_!n>RdI6pw>Vt1_j&+bKIaMTYLiqhZ#y_=J8`TK{Jd<7l9&sY z^^`hmi7^14s16B6)1O;vJWOF$=$B5ONW;;2&|pUvJlmeUS&F;DbSHCrEb0QBDR|my zIs+pE0Y^`qJTyH-_mP=)Y+u^LHcuZhsM3+P||?+W#V!_6E-8boP#R-*na4!o-Q1 zVthtYhK{mDhF(&7Okzo9dTi03X(AE{8cH$JIg%MEQca`S zy@8{Fjft~~BdzWC(di#X{ny;!yYGK9b@=b|zcKZ{vv4D8i+`ilOPl;PJl{!&5-0!w z^fOl#|}vVg%=n)@_e1BrP)`A zKPgs`O0EO}Y2KWLuo`iGaKu1k#YR6BMySxQf2V++Wo{6EHmK>A~Q5o73yM z-RbxC7Qdh0Cz!nG+7BRZE>~FLI-?&W_rJUl-8FDIaXoNBL)@1hwKa^wOr1($*5h~T zF;%f^%<$p8Y_yu(JEg=c_O!aZ#)Gjh$n(hfJAp$C2he555W5zdrBqjFmo|VY+el;o z=*D_w|GXG|p0**hQ7~9-n|y5k%B}TAF0iarDM!q-jYbR^us(>&y;n^2l0C%@2B}KM zyeRT9)oMt97Agvc4sEKUEy%MpXr2vz*lb zh*L}}iG>-pqDRw7ud{=FvTD?}xjD)w{`KzjNom-$jS^;iw0+7nXSnt1R@G|VqoRhE%12nm+PH?9`(4rM0kfrZzIK9JU=^$YNyLvAIoxl#Q)xxDz!^0@zZ zSCs$nfcxK_vRYM34O<1}QHZ|hp4`ioX3x8(UV(FU$J@o%tw3t4k1QPmlEpZa2IujG&(roX_q*%e`Hq|);0;@k z0z=fZiFckp#JzW0p+2A+D$PC~IsakhJJkG(c;CqAgFfU0Z`u$PzG~-9I1oPHrCw&)@s^Dc~^)#HPW0Ra}J^=|h7Fs*<8|b13ZzG6MP*Q1dkoZ6&A^!}|hbjM{2HpqlSXv_UUg1U4gn z3Q)2VjU^ti1myodv+tjhSZp%D978m~p& z43uZUrraHs80Mq&vcetqfQpQP?m!CFj)44t8Z}k`E798wxg&~aCm+DBoI+nKq}&j^ zlPY3W$)K;KtEajks1`G?-@me7C>{PiiBu+41#yU_c(dITaqE?IQ(DBu+c^Ux!>pCj zLC|HJGU*v+!it1(;3e`6igkH(VA)-S+k(*yqxMgUah3$@C zz`7hEM47xr>j8^g`%*f=6S5n>z%Bt_Fg{Tvmr+MIsCx=0gsu_sF`q2hlkEmisz#Fy zj_0;zUWr;Gz}$BS%Y`meb(=$d%@Crs(OoJ|}m#<7=-A~PQbyN$x%2iXP2@e*nO0b7AwfH8cCUa*Wfu@b)D_>I*%uE4O3 
z(lfnB`-Xf*LfC)E}e?%X2kK7DItK6Tf<+M^mX0Ijf_!IP>7c8IZX%8_#0060P{QMuV^B9i<^E`_Qf0pv9(P%_s8D`qvDE9LK9u-jB}J2S`(mCO&XHTS04Z5Ez*vl^T%!^$~EH8M-UdwhegL>3IQ*)(MtuH2Xt1p!fS4o~*rR?WLxlA!sjc2(O znjJn~wQ!Fp9s2e^IWP1C<4%sFF}T4omr}7+4asciyo3DntTgWIzhQpQirM$9{EbQd z3jz9vS@{aOqTQHI|l#aUV@2Q^Wko4T0T04Me4!2nsdrA8QY1%fnAYb~d2GDz@lAtfcHq(P7 zaMBAGo}+NcE-K*@9y;Vt3*(aCaMKXBB*BJcD_Qnxpt75r?GeAQ}*|>pYJE=uZb73 zC>sv)18)q#EGrTG6io*}JLuB_jP3AU1Uiu$D7r|2_zlIGb9 zjhst#ni)Y`$)!fc#reM*$~iaYoz~_Cy7J3ZTiPm)E?%`fbk`3Tu-F#`{i!l5pNEn5 zO-Tw-=TojYhzT{J=?SZj=Z8#|eoF>434b-DXiUsignxXNaR3 zm_}4iWU$gt2Mw5NvZ5(VpF`?X*f2UZDs1TEa1oZCif?Jdgr{>O~7}-$|BZ7I(IKW`{f;@|IZFX*R8&iT= zoWstN8&R;}@2Ka%d3vrLtR|O??ben;k8QbS-WB0VgiCz;<$pBmIZdN!aalyCSEm)crpS9dcD^Y@XT1a3+zpi-`D}e#HV<} z$Y(G&o~PvL-xSVD5D?JqF3?B9rxGWeb=oEGJ3vRp5xfBPlngh1O$yI95EL+T8{GC@ z98i1H9KhZGFl|;`)_=QpM6H?eDPpw~^(aFQWwyXZ8_EEE4#@QeT_URray*mEOGsGc z6|sdXtq!hVZo=d#+9^@lm&L5|q&-GDCyUx#YQiccq;spOBe3V+VKdjJA=IL=Zn%P} zNk=_8u}VhzFf{UYZV0`lUwcD&)9AFx0@Fc6LD9A6Rd1=ga>Mi0)_QxM2ddCVRmZ0d z+J=uXc(?5JLX3=)e)Jm$HS2yF`44IKhwRnm2*669_J=2LlwuF5$1tAo@ROSU@-y+;Foy2IEl2^V1N;fk~YR z?&EP8#t&m0B=?aJeuz~lHjAzRBX>&x=A;gIvb>MD{XEV zV%l-+9N-)i;YH%nKP?>f`=?#`>B(`*t`aiPLoQM(a6(qs4p5KFjDBN?8JGrf3z8>= zi7sD)c)Nm~x{e<^jy4nTx${P~cwz_*a>%0_;ULou3kHCAD7EYkw@l$8TN#LO9jC( z1BeFW`k+bu5e8Ns^a8dPcjEVHM;r6UX+cN=Uy7HU)j-myRU0wHd$A1fNI~`4;I~`zC)3ul#8#^rXVSO*m}Ag>c%_;nj=Nv$rCZ z*~L@C@OZg%Q^m)lc-kcX&a*a5`y&DaRxh6O*dfhLfF+fU5wKs(1v*!TkZidw*)YBP za@r`3+^IHRFeO%!ai%rxy;R;;V^Fr=OJlpBX;(b*3+SIw}7= zIq$*Thr(Zft-RlY)D3e8V;BmD&HOfX+E$H#Y@B3?UL5L~_fA-@*IB-!gItK7PIgG9 zgWuGZK_nuZjHVT_Fv(XxtU%)58;W39vzTI2n&)&4Dmq7&JX6G>XFaAR{7_3QB6zsT z?$L8c*WdN~nZGiscY%5KljQARN;`w$gho=p006z;n(qIQ*Zu<``TMO3n0{ARL@gYh zoRwS*|Niw~cR!?hE{m*y@F`1)vx-JRfqET=dJ5_(076st(=lFfjtKHoYg`k3oNmo_ zNbQEw8&sO5jAYmkD|Zaz_yUb0rC})U!rCHOl}JhbYIDLzLvrZVw0~JO`d*6f;X&?V=#T@ND*cv^I;`sFeq4 z##H5;gpZTb^0Hz@3C*~u0AqqNZ-r%rN3KD~%Gw`0XsIq$(^MEb<~H(2*5G^<2(*aI z%7}WB+TRlMIrEK#s0 z93xn*Ohb=kWFc)BNHG4I(~RPn-R8#0lqyBBz5OM6o5|>x9LK@%HaM}}Y5goCQRt2C 
z{j*2TtT4ne!Z}vh89mjwiSXG=%DURar~=kGNNaO_+Nkb+tRi~Rkf!7a$*QlavziD( z83s4GmQ^Wf*0Bd04f#0HX@ua_d8 z23~z*53ePD6@xwZ(vdl0DLc=>cPIOPOdca&MyR^jhhKrdQO?_jJh`xV3GKz&2lvP8 zEOwW6L*ufvK;TN{=S&R@pzV^U=QNk^Ec}5H z+2~JvEVA{`uMAr)?Kf|aW>33`)UL@bnfIUQc~L;TsTQ6>r-<^rB8uoNOJ>HWgqMI8 zSW}pZmp_;z_2O5_RD|fGyTxaxk53Hg_3Khc<8AUzV|ZeK{fp|Ne933=1&_^Dbv5^u zB9n=*)k*tjHDRJ@$bp9mrh}qFn*s}npMl5BMDC%Hs0M0g-hW~P*3CNG06G!MOPEQ_ zi}Qs-6M8aMt;sL$vlmVBR^+Ry<64jrm1EI1%#j?c?4b*7>)a{aDw#TfTYKq+SjEFA z(aJ&z_0?0JB83D-i3Vh+o|XV4UP+YJ$9Boid2^M2en@APw&wx7vU~t$r2V`F|7Qfo z>WKgI@eNBZ-+Og<{u2ZiG%>YvH2L3fNpV9J;WLJoBZda)01Rn;o@){01{7E#ke(7U zHK>S#qZ(N=aoae*4X!0A{)nu0R_sKpi1{)u>GVjC+b5Jyl6#AoQ-1_3UDovNSo`T> z?c-@7XX*2GMy?k?{g)7?Sv;SJkmxYPJPs!&QqB12ejq`Lee^-cDveVWL^CTUldb(G zjDGe(O4P=S{4fF=#~oAu>LG>wrU^z_?3yt24FOx>}{^lCGh8?vtvY$^hbZ)9I0E3r3NOlb9I?F-Yc=r$*~l`4N^xzlV~N zl~#oc>U)Yjl0BxV>O*Kr@lKT{Z09OXt2GlvE38nfs+DD7exl|&vT;)>VFXJVZp9Np zDK}aO;R3~ag$X*|hRVY3OPax|PG`@_ESc8E!mHRByJbZQRS38V2F__7MW~sgh!a>98Q2%lUNFO=^xU52|?D=IK#QjwBky-C>zOWlsiiM&1n z;!&1((Xn1$9K}xabq~222gYvx3hnZPg}VMF_GV~5ocE=-v>V=T&RsLBo&`)DOyIj* zLV{h)JU_y*7SdRtDajP_Y+rBkNN*1_TXiKwHH2&p51d(#zv~s#HwbNy?<+(=9WBvo zw2hkk2Dj%kTFhY+$T+W-b7@qD!bkfN#Z2ng@Pd=i3-i?xYfs5Z*1hO?kd7Sp^9`;Y zM2jeGg<-nJD1er@Pc_cSY7wo5dzQX44=%6rn}P_SRbpzsA{6B+!$3B0#;}qwO37G^ zL(V_5JK`XT?OHVk|{_$vQ|oNEpab*BO4F zUTNQ7RUhnRsU`TK#~`)$icsvKh~(pl=3p6m98@k3P#~upd=k*u20SNcb{l^1rUa)>qO997)pYRWMncC8A&&MHlbW?7i^7M`+B$hH~Y|J zd>FYOGQ;j>Zc2e7R{KK7)0>>nn_jYJy&o@sK!4G>-rLKM8Hv)f;hi1D2fAc$+six2 zyVZ@wZ6x|fJ!4KrpCJY=!Mq0;)X)OoS~{Lkh6u8J`eK%u0WtKh6B>GW_)PVc zl}-k`p09qwGtZ@VbYJC!>29V?Dr>>vk?)o(x?!z*9DJ||9qG-&G~#kXxbw{KKYy}J zQKa-dPt~M~E}V?PhW0R26xdA%1T*%ra6SguGu50YHngOTIv)@N|YttEXo#OZfgtP7;H?EeZZxo<}3YlYxtBq znJ!WFR^tmGf0Py}N?kZ(#=VtpC@%xJkDmfcCoBTxq zr_|5gP?u1@vJZbxPZ|G0AW4=tpb84gM2DpJU||(b8kMOV1S3|(yuwZJ&rIiFW(U;5 zUtAW`O6F6Zy+eZ1EDuP~AAHlSY-+A_eI5Gx)%*uro5tljy}kCZU*_d7)oJ>oQSZ3* zneTn`{gnNC&uJd)0aMBzAg021?YJ~b(fmkwZAd696a=0NzBAqBN54KuNDwa*no(^O 
z6p05bioXUR^uXjpTol*ppHp%1v9e)vkoUAUJyBx3lw0UO39b0?^{}yb!$yca(@DUn zCquRF?t=Zb9`Ed3AI6|L{eX~ijVH`VzSMheKoP7LSSf4g>md>`yi!TkoG5P>Ofp+n z(v~rW+(5L96L{vBb^g51B=(o)?%%xhvT*A5btOpw(TKh^g^4c zw>0%X!_0`{iN%RbVk+A^f{w-4-SSf*fu@FhruNL##F~sF24O~u zyYF<3el2b$$wZ_|uW#@Ak+VAGk#e|kS8nL1g>2B-SNMjMp^8;-FfeofY2fphFHO!{ z*!o4oTb{4e;S<|JEs<1_hPsmAlVNk?_5-Fp5KKU&d#FiNW~Y+pVFk@Cua1I{T+1|+ zHx6rFMor)7L)krbilqsWwy@T+g3DiH5MyVf8Wy}XbEaoFIDr~y;@r&I>FMW{ z?Q+(IgyebZ)-i4jNoXQhq4Muy9Fv+OxU;9_Jmn+<`mEC#%2Q_2bpcgzcinygNI!&^ z=V$)o2&Yz04~+&pPWWn`rrWxJ&}8khR)6B(--!9Q zubo}h+1T)>a@c)H^i``@<^j?|r4*{;tQf78(xn0g39IoZw0(CwY1f<%F>kEaJ zp9u|IeMY5mRdAlw*+gSN^5$Q)ShM<~E=(c8QM+T-Qk)FyKz#Sw0EJ*edYcuOtO#~Cx^(M7w5 z3)rl#L)rF|(Vun2LkFr!rg8Q@=r>9p>(t3Gf_auiJ2Xx9HmxYTa|=MH_SUlYL`mz9 zTTS$`%;D-|Jt}AP1&k7PcnfFNTH0A-*FmxstjBDiZX?}%u%Yq94$fUT&z6od+(Uk> zuqsld#G(b$G8tus=M!N#oPd|PVFX)?M?tCD0tS%2IGTfh}3YA3f&UM)W$_GNV8 zQo+a(ml2Km4o6O%gKTCSDNq+#zCTIQ1*`TIJh~k6Gp;htHBFnne))rlFdGqwC6dx2+La1&Mnko*352k0y z+tQcwndQlX`nc6nb$A9?<-o|r*%aWXV#=6PQic0Ok_D;q>wbv&j7cKc!w4~KF#-{6 z(S%6Za)WpGIWf7jZ3svNG5OLs0>vCL9{V7cgO%zevIVMH{WgP*^D9ws&OqA{yr|m| zKD4*07dGXshJHd#e%x%J+qmS^lS|0Bp?{drv;{@{l9ArPO&?Q5=?OO9=}h$oVe#3b z3Yofj&Cb}WC$PxmRRS)H%&$1-)z7jELS}!u!zQ?A^Y{Tv4QVt*vd@uj-^t2fYRzQj zfxGR>-q|o$3sGn^#VzZ!QQx?h9`njeJry}@x?|k0-GTTA4y3t2E`3DZ!A~D?GiJup z)8%PK2^9OVRlP(24P^4_<|D=H^7}WlWu#LgsdHzB%cPy|f8dD3|A^mh4WXxhLTVu_ z@abE{6Saz|Y{rXYPd4$tfPYo}ef(oQWZ=4Bct-=_9`#Qgp4ma$n$`tOwq#&E18$B; z@Bp)bn3&rEi0>fWWZ@7k5WazfoX`SCO4jQWwVuo+$PmSZn^Hz?O(-tW@*DGxuf)V1 zO_xm&;NVCaHD4dqt(-MlszI3F-p?0!-e$fbiCeuaw66h^TTDLWuaV<@C-`=Xe5WL) zwooG7h>4&*)p3pKMS3O!4>-4jQUN}iAMQ)2*70?hP~)TzzR?-f@?Aqy$$1Iy8VGG$ zMM?8;j!pUX7QQD$gRc_#+=raAS577ga-w?jd`vCiN5lu)dEUkkUPl9!?{$IJNxQys z*E4e$eF&n&+AMRQR2gcaFEjAy*r)G!s(P6D&TfoApMFC_*Ftx0|D0@E-=B7tezU@d zZ{hGiN;YLIoSeRS;9o%dEua4b%4R3;$SugDjP$x;Z!M!@QibuSBb)HY!3zJ7M;^jw zlx6AD50FD&p3JyP*>o+t9YWW8(7P2t!VQQ21pHJOcG_SXQD;(5aX#M6x##5H_Re>6lPyDCjxr*R(+HE%c&QN+b^tbT zXBJk?p)zhJj#I?&Y2n&~XiytG9!1ox;bw5Rbj~)7c(MFBb4>IiRATdhg 
zmiEFlj@S_hwYYI(ki{}&<;_7(Z0Qkfq>am z&LtL=2qc7rWguk3BtE4zL41@#S;NN*-jWw|7Kx7H7~_%7fPt;TIX}Ubo>;Rmj94V> zNB1=;-9AR7s`Pxn}t_6^3ahlq53e&!Lh85uG zec0vJY_6e`tg7LgfrJ3k!DjR)Bi#L@DHIrZ`sK=<5O0Ip!fxGf*OgGSpP@Hbbe&$9 z;ZI}8lEoC2_7;%L2=w?tb%1oL0V+=Z`7b=P&lNGY;yVBazXRYu;+cQDKvm*7NCxu&i;zub zAJh#11%?w>E2rf2e~C4+rAb-&$^vsdACs7 z@|Ra!OfVM(ke{vyiqh7puf&Yp6cd6{DptUteYfIRWG3pI+5< zBVBI_xkBAc<(pcb$!Y%dTW(b;B;2pOI-(QCsLv@U-D1XJ z(Gk8Q3l7Ws46Aktuj>|s{$6zA&xCPuXL-kB`CgYMs}4IeyG*P51IDwW?8UNQd+$i~ zlxOPtSi5L|gJcF@DwmJA5Ju8HEJ>o{{upwIpb!f{2(vLNBw`7xMbvcw<^{Fj@E~1( z?w`iIMieunS#>nXlmUcSMU+D3rX28f?s7z;X=se6bo8;5vM|O^(D6{A9*ChnGH!RG zP##3>LDC3jZPE4PH32AxrqPk|yIIrq~`aL-=}`okhNu9aT%q z1b)7iJ)CN=V#Ly84N_r7U^SH2FGdE5FpTO2 z630TF$P>GNMu8`rOytb(lB2};`;P4YNwW1<5d3Q~AX#P0aX}R2b2)`rgkp#zTxcGj zAV^cvFbhP|JgWrq_e`~exr~sIR$6p5V?o4Wym3kQ3HA+;Pr$bQ0(PmADVO%MKL!^q z?zAM8j1l4jrq|5X+V!8S*2Wl@=7*pPgciTVK6kS1Ge zMsd_u6DFK$jTnvVtE;qa+8(1sGBu~n&F%dh(&c(Zs4Fc#A=gG^^%^AyH}1^?|8quj zl@Z47h$){PlELJgYZCIHHL= z{U8O>Tw4x3<1{?$8>k-P<}1y9DmAZP_;(3Y*{Sk^H^A=_iSJ@+s5ktgwTXz_2$~W9>VVZsfwCm@s0sQ zeB50_yu@uS+e7QoPvdCwDz{prjo(AFwR%C?z`EL{1`|coJHQTk^nX=tvs1<0arUOJ z!^`*x&&BvTYmemyZ)2p~{%eYX=JVR?DYr(rNgqRMA5E1PR1Iw=prk=L2ldy3r3Vg@27IZx43+ywyzr-X*p*d@tZV+!U#~$-q=8c zgdSuh#r?b4GhEGNai)ayHQpk>5(%j5c@C1K3(W1pb~HeHpaqijJZa-e6vq_8t-^M^ zBJxq|MqZc?pjXPIH}70a5vt!IUh;l}<>VX<-Qcv^u@5(@@M2CHSe_hD$VG-eiV^V( zj7*9T0?di?P$FaD6oo?)<)QT>Npf6Og!GO^GmPV(Km0!=+dE&bk#SNI+C9RGQ|{~O*VC+tXK3!n`5 zHfl6>lwf_aEVV3`0T!aHNZLsj$paS$=LL(?b!Czaa5bbSuZ6#$_@LK<(7yrrl+80| z{tOFd=|ta2Z`^ssozD9BINn45NxUeCQis?-BKmU*Kt=FY-NJ+)8S1ecuFtN-M?&42 zl2$G>u!iNhAk*HoJ^4v^9#ORYp5t^wDj6|lx~5w45#E5wVqI1JQ~9l?nPp1YINf++ zMAdSif~_ETv@Er(EFBI^@L4BULFW>)NI+ejHFP*T}UhWNN`I)RRS8za? 
z*@`1>9ZB}An%aT5K=_2iQmfE;GcBVHLF!$`I99o5GO`O%O_zLr9AG18>&^HkG(;=V z%}c!OBQ~?MX(9h~tajX{=x)+!cbM7$YzTlmsPOdp2L-?GoW`@{lY9U3f;OUo*BwRB z8A+nv(br0-SH#VxGy#ZrgnGD(=@;HME;yd46EgWJ`EL%oXc&lFpc@Y}^>G(W>h_v_ zlN!`idhX+OjL+~T?19sroAFVGfa5tX-D49w$1g2g_-T|EpHL6}K_aX4$K=LTvwtlF zL*z}j{f+Uoe7{-px3_5iKPA<_7W=>Izkk)!l9ez2w%vi(?Y;i8AxRNLSOGDzNoqoI zP!1uAl}r=_871(G?y`i&)-7{u=%nxk7CZ_Qh#!|ITec zwQn`33GTUM`;D2POWnkqngqJhJRlM>CTONzTG}>^Q0wUunQyn|TAiHzyX2_%ATx%P z%7gW)%4rA9^)M<_%k@`Y?RbC<29sWU&5;@|9thf2#zf8z12$hRcZ!CSb>kUp=4N#y zl3hE#y6>kkA8VY2`W`g5Ip?2qC_BY$>R`iGQLhz2-S>x(RuWv)SPaGdl^)gGw7tjR zH@;jwk!jIaCgSg_*9iF|a);sRUTq30(8I(obh^|}S~}P4U^BIGYqcz;MPpC~Y@k_m zaw4WG1_vz2GdCAX!$_a%GHK**@IrHSkGoN>)e}>yzUTm52on`hYot7cB=oA-h1u|R ztH$11t?54Qg2L+i33FPFKKRm1aOjKST{l1*(nps`>sv%VqeVMWjl5+Gh+9);hIP8? zA@$?}Sc z3qIRpba+y5yf{R6G(u8Z^vkg0Fu&D-7?1s=QZU`Ub{-!Y`I?AGf1VNuc^L3v>)>i# z{DV9W$)>34wnzAXUiV^ZpYKw>UElrN_5Xj6{r_3| z$X5PK`e5$7>~9Dj7gK5ash(dvs`vwfk}&RD`>04;j62zoXESkFBklYaKm5seyiX(P zqQ-;XxlV*yg?Dhlx%xt!b0N3GHp@(p$A;8|%# zZ5m2KL|{on4nr>2_s9Yh=r5ScQ0;aMF)G$-9-Ca6%wA`Pa)i?NGFA|#Yi?{X-4ZO_ z^}%7%vkzvUHa$-^Y#aA+aiR5sa%S|Ebyn`EV<3Pc?ax_f>@sBZF1S;7y$CXd5t5=WGsTKBk8$OfH4v|0?0I=Yp}7c=WBSCg!{0n)XmiU;lfx)**zZaYqmDJelxk$)nZyx5`x$6R|fz(;u zEje5Dtm|a%zK!!tk3{i9$I2b{vXNFy%Bf{50X!x{98+BsDr_u9i>G5%*sqEX|06J0 z^IY{UcEbj6LDwuMh7cH`H@9sVt1l1#8kEQ(LyT@&+K}(ReE`ux8gb0r6L_#bDUo^P z3Ka2lRo52Hdtl_%+pwVs14=q`{d^L58PsU@AMf(hENumaxM{7iAT5sYmWh@hQCO^ zK&}ijo=`VqZ#a3vE?`7QW0ZREL17ZvDfdqKGD?0D4fg{7v%|Yj&_jcKJAB)>=*RS* zto8p6@k%;&^ZF>hvXm&$PCuEp{uqw3VPG$9VMdW5$w-fy2CNNT>E;>ejBgy-m_6`& z97L1p{%srn@O_JQgFpa_#f(_)eb#YS>o>q3(*uB;uZb605(iqM$=NK{nHY=+X2*G) zO3-_Xh%aG}fHWe*==58zBwp%&`mge<8uq8;xIxOd=P%9EK!34^E9sk|(Zq1QSz-JVeP12Fp)-`F|KY$LPwUE?rku zY@OJ)Z9A!ojfzfeyJ9;zv2EM7ZQB)AR5xGa-tMn^bl)FmoIiVyJ@!~@%{}qXXD&Ns zPnfe5U+&ohKefILu_1mPfLGuapX@btta5C#gPB2cjk5m4T}Nfi+Vfka!Yd(L?-c~5 z#ZK4VeQEXNPc4r$K00Fg>g#_W!YZ)cJ?JTS<&68_$#cZT-ME`}tcwqg3#``3M3UPvn+pi}(VNNx6y zFIMVb6OwYU(2`at$gHba*qrMVUl8xk5z-z~fb@Q3Y_+aXuEKH}L+>eW__!IAd@V}L 
zkw#s%H0v2k5-=vh$^vPCuAi22Luu3uKTf6fPo?*nvj$9(u)4$6tvF-%IM+3pt*cgs z_?wW}J7VAA{_~!?))?s6{M=KPpVhg4fNuU*|3THp@_(q!b*hdl{fjRVFWtu^1dV(f z6iOux9hi&+UK=|%M*~|aqFK{Urfl!TA}UWY#`w(0P!KMe1Si{8|o))Gy6d7;!JQYhgMYmXl?3FfOM2nQGN@~Ap6(G z3+d_5y@=nkpKAhRqf{qQ~k7Z$v&l&@m7Ppt#FSNzKPZM z8LhihcE6i=<(#87E|Wr~HKvVWhkll4iSK$^mUHaxgy8*K$_Zj;zJ`L$naPj+^3zTi z-3NTaaKnD5FPY-~?Tq6QHnmDDRxu0mh0D|zD~Y=vv_qig5r-cIbCpxlju&8Sya)@{ zsmv6XUSi)@(?PvItkiZEeN*)AE~I_?#+Ja-r8$(XiXei2d@Hi7Rx8+rZZb?ZLa{;@*EHeRQ-YDadz~M*YCM4&F-r;E#M+@CSJMJ0oU|PQ^ z=E!HBJDMQ2TN*Y(Ag(ynAL8%^v;=~q?s4plA_hig&5Z0x_^Oab!T)@6kRN$)qEJ6E zNuQjg|G7iwU(N8pI@_6==0CL;lRh1dQF#wePhmu@hADFd3B5KIH#dx(2A zp~K&;Xw}F_N6CU~0)QpQk7s$a+LcTOj1%=WXI(U=Dv!6 z{#<#-)2+gCyyv=Jw?Ab#PVkxPDeH|sAxyG`|Ys}A$PW4TdBv%zDz z^?lwrxWR<%Vzc8Sgt|?FL6ej_*e&rhqJZ3Y>k=X(^dytycR;XDU16}Pc9Vn0>_@H+ zQ;a`GSMEG64=JRAOg%~L)x*w{2re6DVprNp+FcNra4VdNjiaF0M^*>CdPkt(m150rCue?FVdL0nFL$V%5y6N z%eLr5%YN7D06k5ji5*p4v$UMM)G??Q%RB27IvH7vYr_^3>1D-M66#MN8tWGw>WED} z5AhlsanO=STFYFs)Il_0i)l)f<8qn|$DW7ZXhf5xI;m+7M5-%P63XFQrG9>DMqHc} zsgNU9nR`b}E^mL5=@7<1_R~j@q_2U^3h|+`7YH-?C=vme1C3m`Fe0HC>pjt6f_XMh zy~-i-8R46QNYneL4t@)<0VU7({aUO?aH`z4V2+kxgH5pYD5)wCh75JqQY)jIPN=U6 z+qi8cGiOtXG2tXm;_CfpH9ESCz#i5B(42}rBJJF$jh<1sbpj^8&L;gzGHb8M{of+} zzF^8VgML2O9nxBW7AvdEt90vp+#kZxWf@A)o9f9}vKJy9NDBjBW zSt=Hcs=YWCwnfY1UYx*+msp{g!w0HC<_SM!VL1(I2PE?CS}r(eh?{I)mQixmo5^p# zV?2R!R@3GV6hwTCrfHiK#3Orj>I!GS2kYhk1S;aFBD_}u2v;0HYFq}Iz1Z(I4oca4 zxquja8$+8JW_EagDHf$a1OTk5S97umGSDaj)gH=fLs9>_=XvVj^Xj9a#gLdk=&3tl zfmK9MNnIX9v{?%xdw7568 zNrZ|roYs(vC4pHB5RJ8>)^*OuyNC>x7ad)tB_}3SgQ96+-JT^Qi<`xi=)_=$Skwv~ zdqeT9Pa`LYvCAn&rMa2aCDV(TMI#PA5g#RtV|CWpgDYRA^|55LLN^uNh*gOU>Z=a06qJ;$C9z8;n-Pq=qZnc1zUwJ@t)L;&NN+E5m zRkQ(SeM8=l-aoAKGKD>!@?mWTW&~)uF2PYUJ;tB^my`r9n|Ly~0c%diYzqs9W#FTjy?h&X3TnH zXqA{QI82sdjPO->f=^K^f>N`+B`q9&rN0bOXO79S&a9XX8zund(kW7O76f4dcWhIu zER`XSMSFbSL>b;Rp#`CuGJ&p$s~G|76){d?xSA5wVg##_O0DrmyEYppyBr%fyWbbv zp`K84JwRNP$d-pJ!Qk|(RMr?*!wi1if-9G#0p>>1QXKXWFy)eB3ai)l3601q8!9JC 
zvU#ZWWDNKq9g6fYs?JQ)Q4C_cgTy3FhgKb8s&m)DdmL5zhNK#8wWg!J*7G7Qhe9VU zha?^AQTDpYcuN!B+#1dE*X{<#!M%zfUQbj=zLE{dW0XeQ7-oIsGY6RbkP2re@Q{}r_$iiH0xU%iN*ST`A)-EH6eaZB$GA#v)cLi z*MpA(3bYk$oBDKAzu^kJoSUsDd|856DApz={3u8sbQV@JnRkp2nC|)m;#T=DvIL-O zI4vh;g7824l}*`_p@MT4+d`JZ2%6NQh=N9bmgJ#q!hK@_<`HQq3}Z8Ij>3%~<*= zcv=!oT#5xmeGI92lqm9sGVE%#X$ls;St|F#u!?5Y7syhx6q#MVRa&lBmmn%$C0QzU z);*ldgwwCmzM3uglr}!Z2G+?& zf%Dpo&mD%2ZcNFiN-Z0f;c_Q;A%f@>26f?{d1kxIJD}LxsQkB47SAdwinfMILZdN3 zfj^HmTzS3Ku5BxY>ANutS8WPQ-G>v4^_Qndy==P3pDm+Xc?>rUHl-4+^%Sp5atOja z2oP}ftw-rqnb}+khR3CrRg^ibi6?QYk1*i^;kQGirQ=uB9Sd1NTfT-Rbv;hqnY4neE5H1YUrjS2m+2&@uXiAo- zrKUX|Ohg7(6F(AoP~tj;NZlV#xsfo-5reuQHB$&EIAhyZk;bL;k9ouDmJNBAun;H& zn;Of1z_Qj`x&M;5X;{s~iGzBQTY^kv-k{ksbE*Dl%Qf%N@hQCfY~iUw!=F-*$cpf2 z3wix|aLBV0b;W@z^%7S{>9Z^T^fLOI68_;l@+Qzaxo`nAI8emTV@rRhEKZ z?*z_{oGdI~R*#<2{bkz$G~^Qef}$*4OYTgtL$e9q!FY7EqxJ2`zk6SQc}M(k(_MaV zSLJnTXw&@djco1~a(vhBl^&w=$fa9{Sru>7g8SHahv$&Bl(D@(Zwxo_3r=;VH|uc5 zi1Ny)J!<(KN-EcQ(xlw%PNwK8U>4$9nVOhj(y0l9X^vP1TA>r_7WtSExIOsz`nDOP zs}d>Vxb2Vo2e5x8p(n~Y5ggAyvib>d)6?)|E@{FIz?G3PVGLf7-;BxaP;c?7ddH$z zA+{~k^V=bZuXafOv!RPsE1GrR3J2TH9uB=Z67gok+u`V#}BR86hB1xl}H4v`F+mRfr zYhortD%@IGfh!JB(NUNSDh+qDz?4ztEgCz&bIG-Wg7w-ua4ChgQR_c+z8dT3<1?uX z*G(DKy_LTl*Ea!%v!RhpCXW1WJO6F`bgS-SB;Xw9#! 
z<*K}=#wVu9$`Yo|e!z-CPYH!nj7s9dEPr-E`DXUBu0n!xX~&|%#G=BeM?X@shQQMf zMvr2!y7p_gD5-!Lnm|a@z8Of^EKboZsTMk%5VsJEm>VsJ4W7Kv{<|#4f-qDE$D-W>gWT%z-!qXnDHhOvLk=?^a1*|0j z{pW{M0{#1VcR5;F!!fIlLVNh_Gj zbnW(_j?0c2q$EHIi@fSMR{OUKBcLr{Y&$hrM8XhPByyZaXy|dd&{hYQRJ9@Fn%h3p7*VQolBIV@Eq`=y%5BU~3RPa^$a?ixp^cCg z+}Q*X+CW9~TL29@OOng(#OAOd!)e$d%sr}^KBJ-?-X&|4HTmtemxmp?cT3uA?md4% zT8yZ0U;6Rg6JHy3fJae{6TMGS?ZUX6+gGTT{Q{)SI85$5FD{g-eR%O0KMpWPY`4@O zx!hen1*8^E(*}{m^V_?}(b5k3hYo=T+$&M32+B`}81~KKZhY;2H{7O-M@vbCzuX0n zW-&HXeyr1%I3$@ns-V1~Lb@wIpkmx|8I~ob1Of7i6BTNysEwI}=!nU%q7(V_^+d*G z7G;07m(CRTJup!`cdYi93r^+LY+`M*>aMuHJm(A8_O8C#A*$!Xvddgpjx5)?_EB*q zgE8o5O>e~9IiSC@WtZpF{4Bj2J5eZ>uUzY%TgWF7wdDE!fSQIAWCP)V{;HsU3ap?4 znRsiiDbtN7i9hapO;(|Ew>Ip2TZSvK9Z^N21%J?OiA_&eP1{(Pu_=%JjKy|HOardq ze?zK^K zA%sjF64*Wufad%H<) z^|t>e*h+Z1#l=5wHexzt9HNDNXgM=-OPWKd^5p!~%SIl>Fo&7BvNpbf8{NXmH)o{r zO=aBJ;meX1^{O%q;kqdw*5k!Y7%t_30 zy{nGRVc&5qt?dBwLs+^Sfp;f`YVMSB#C>z^a9@fpZ!xb|b-JEz1LBX7ci)V@W+kvQ89KWA0T~Lj$aCcfW#nD5bt&Y_< z-q{4ZXDqVg?|0o)j1%l0^_it0WF*LCn-+)c!2y5yS7aZIN$>0LqNnkujV*YVes(v$ zY@_-!Q;!ZyJ}Bg|G-~w@or&u0RO?vlt5*9~yeoPV_UWrO2J54b4#{D(D>jF(R88u2 zo#B^@iF_%S>{iXSol8jpmsZuJ?+;epg>k=$d`?GSegAVp3n$`GVDvK${N*#L_1`44 z{w0fL{2%)0|E+qgZtjX}itZz^KJt4Y;*8uSK}Ft38+3>j|K(PxIXXR-t4VopXo#9# zt|F{LWr-?34y`$nLBVV_*UEgA6AUI65dYIbqpNq9cl&uLJ0~L}<=ESlOm?Y-S@L*d z<7vt}`)TW#f%Rp$Q}6@3=j$7Tze@_uZO@aMn<|si{?S}~maII`VTjs&?}jQ4_cut9$)PEqMukwoXobzaKx^MV z2fQwl+;LSZ$qy%Tys0oo^K=jOw$!YwCv^ei4NBVauL)tN%=wz9M{uf{IB(BxK|lT*pFkmNK_1tV`nb%jH=a0~VNq2RCKY(rG7jz!-D^k)Ec)yS%17pE#o6&eY+ z^qN(hQT$}5F(=4lgNQhlxj?nB4N6ntUY6(?+R#B?W3hY_a*)hnr4PA|vJ<6p`K3Z5Hy z{{8(|ux~NLUW=!?9Qe&WXMTAkQnLXg(g=I@(VG3{HE13OaUT|DljyWXPs2FE@?`iU z4GQlM&Q=T<4&v@Fe<+TuXiZQT3G~vZ&^POfmI1K2h6t4eD}Gk5XFGpbj1n_g*{qmD6Xy z`6Vv|lLZtLmrnv*{Q%xxtcWVj3K4M%$bdBk_a&ar{{GWyu#ljM;dII;*jP;QH z#+^o-A4np{@|Mz+LphTD0`FTyxYq#wY)*&Ls5o{0z9yg2K+K7ZN>j1>N&;r+Z`vI| zDzG1LJZ+sE?m?>x{5LJx^)g&pGEpY=fQ-4}{x=ru;}FL$inHemOg%|R*ZXPodU}Kh zFEd5#+8rGq$Y<_?k-}r5zgQ3jRV=ooHiF|@z_#D4pKVEmn5CGV(9VKCyG|sT9nc=U 
zEoT67R`C->KY8Wp-fEcjjFm^;Cg(ls|*ABVHq8clBE(;~K^b+S>6uj70g? z&{XQ5U&!Z$SO7zfP+y^8XBbiu*Cv-yJG|l-oe*!s5$@Lh_KpxYL2sx`B|V=dETN>5K+C+CU~a_3cI8{vbu$TNVdGf15*>D zz@f{zIlorkY>TRh7mKuAlN9A0>N>SV`X)+bEHms=mfYTMWt_AJtz_h+JMmrgH?mZt zm=lfdF`t^J*XLg7v+iS)XZROygK=CS@CvUaJo&w2W!Wb@aa?~Drtf`JV^cCMjngVZ zv&xaIBEo8EYWuML+vxCpjjY^s1-ahXJzAV6hTw%ZIy!FjI}aJ+{rE&u#>rs)vzuxz z+$5z=7W?zH2>Eb32dvgHYZtCAf!=OLY-pb4>Ae79rd68E2LkVPj-|jFeyqtBCCwiW zkB@kO_(3wFq)7qwV}bA=zD!*@UhT`geq}ITo%@O(Z5Y80nEX~;0-8kO{oB6|(4fQh z);73T!>3@{ZobPwRv*W?7m0Ml9GmJBCJd&6E?hdj9lV= z4flNfsc(J*DyPv?RCOx!MSvk(M952PJ-G|JeVxWVjN~SNS6n-_Ge3Q;TGE;EQvZg86%wZ`MB zSMQua(i*R8a75!6$QRO^(o7sGoomb+Y{OMy;m~Oa`;P9Yqo>?bJAhqXxLr7_3g_n>f#UVtxG!^F#1+y@os6x(sg z^28bsQ@8rw%Gxk-stAEPRbv^}5sLe=VMbkc@Jjimqjvmd!3E7+QnL>|(^3!R} zD-l1l7*Amu@j+PWLGHXXaFG0Ct2Q=}5YNUxEQHCAU7gA$sSC<5OGylNnQUa>>l%sM zyu}z6i&({U@x^hln**o6r2s-(C-L50tQvz|zHTqW!ir?w&V23tuYEDJVV#5pE|OJu z7^R!A$iM$YCe?8n67l*J-okwfZ+ZTkGvZ)tVPfR;|3gyFjF)8V zyXXN=!*bpyRg9#~Bg1+UDYCt0 ztp4&?t1X0q>uz;ann$OrZs{5*r`(oNvw=$7O#rD|Wuv*wIi)4b zGtq4%BX+kkagv3F9Id6~-c+1&?zny%w5j&nk9SQfo0k4LhdSU_kWGW7axkfpgR`8* z!?UTG*Zi_baA1^0eda8S|@&F z{)Rad0kiLjB|=}XFJhD(S3ssKlveFFmkN{Vl^_nb!o5M!RC=m)V&v2%e?ZoRC@h3> zJ(?pvToFd`*Zc@HFPL#=otWKwtuuQ_dT-Hr{S%pQX<6dqVJ8;f(o)4~VM_kEQkMR+ zs1SCVi~k>M`u1u2xc}>#D!V&6nOOh-E$O&SzYrjJdZpaDv1!R-QGA141WjQe2s0J~ zQ;AXG)F+K#K8_5HVqRoRM%^EduqOnS(j2)|ctA6Q^=|s_WJYU;Z%5bHp08HPL`YF2 zR)Ad1z{zh`=sDs^&V}J z%$Z$!jd7BY5AkT?j`eqMs%!Gm@T8)4w3GYEX~IwgE~`d|@T{WYHkudy(47brgHXx& zBL1yFG6!!!VOSmDxBpefy2{L_u5yTwja&HA!mYA#wg#bc-m%~8aRR|~AvMnind@zs zy>wkShe5&*un^zvSOdlVu%kHsEo>@puMQ`b1}(|)l~E{5)f7gC=E$fP(FC2=F<^|A zxeIm?{EE!3sO!Gr7e{w)Dx(uU#3WrFZ>ibmKSQ1tY?*-Nh1TDHLe+k*;{Rp!Bmd_m zb#^kh`Y*8l|9Cz2e{;RL%_lg{#^Ar+NH|3z*Zye>!alpt{z;4dFAw^^H!6ING*EFc z_yqhr8d!;%nHX9AKhFQZBGrSzfzYCi%C!(Q5*~hX>)0N`vbhZ@N|i;_972WSx*>LH z87?en(;2_`{_JHF`Sv6Wlps;dCcj+8IJ8ca6`DsOQCMb3n# z3)_w%FuJ3>fjeOOtWyq)ag|PmgQbC-s}KRHG~enBcIwqIiGW8R8jFeBNY9|YswRY5 zjGUxdGgUD26wOpwM#8a!Nuqg68*dG@VM~SbOroL_On0N6QdT9?)NeB3@0FCC?Z|E0 
z6TPZj(AsPtwCw>*{eDEE}Gby>0q{*lI+g2e&(YQrsY&uGM{O~}(oM@YWmb*F zA0^rr5~UD^qmNljq$F#ARXRZ1igP`MQx4aS6*MS;Ot(1L5jF2NJ;de!NujUYg$dr# z=TEL_zTj2@>ZZN(NYCeVX2==~=aT)R30gETO{G&GM4XN<+!&W&(WcDP%oL8PyIVUC zs5AvMgh6qr-2?^unB@mXK*Dbil^y-GTC+>&N5HkzXtozVf93m~xOUHn8`HpX=$_v2 z61H;Z1qK9o;>->tb8y%#4H)765W4E>TQ1o0PFj)uTOPEvv&}%(_mG0ISmyhnQV33Z$#&yd{ zc{>8V8XK$3u8}04CmAQ#I@XvtmB*s4t8va?-IY4@CN>;)mLb_4!&P3XSw4pA_NzDb zORn!blT-aHk1%Jpi>T~oGLuh{DB)JIGZ9KOsciWs2N7mM1JWM+lna4vkDL?Q)z_Ct z`!mi0jtr+4*L&N7jk&LodVO#6?_qRGVaucqVB8*us6i3BTa^^EI0x%EREQSXV@f!lak6Wf1cNZ8>*artIJ(ADO*=<-an`3zB4d*oO*8D1K!f z*A@P1bZCNtU=p!742MrAj%&5v%Xp_dSX@4YCw%F|%Dk=u|1BOmo)HsVz)nD5USa zR~??e61sO(;PR)iaxK{M%QM_rIua9C^4ppVS$qCT9j2%?*em?`4Z;4@>I(c%M&#cH z>4}*;ej<4cKkbCAjjDsyKS8rIm90O)Jjgyxj5^venBx&7B!xLmzxW3jhj7sR(^3Fz z84EY|p1NauwXUr;FfZjdaAfh%ivyp+^!jBjJuAaKa!yCq=?T_)R!>16?{~p)FQ3LDoMyG%hL#pR!f@P%*;#90rs_y z@9}@r1BmM-SJ#DeuqCQk=J?ixDSwL*wh|G#us;dd{H}3*-Y7Tv5m=bQJMcH+_S`zVtf;!0kt*(zwJ zs+kedTm!A}cMiM!qv(c$o5K%}Yd0|nOd0iLjus&;s0Acvoi-PFrWm?+q9f^FslxGi z6ywB`QpL$rJzWDg(4)C4+!2cLE}UPCTBLa*_=c#*$b2PWrRN46$y~yST3a2$7hEH= zNjux+wna^AzQ=KEa_5#9Ph=G1{S0#hh1L3hQ`@HrVnCx{!fw_a0N5xV(iPdKZ-HOM za)LdgK}1ww*C_>V7hbQnTzjURJL`S%`6nTHcgS+dB6b_;PY1FsrdE8(2K6FN>37!62j_cBlui{jO^$dPkGHV>pXvW0EiOA zqW`YaSUBWg_v^Y5tPJfWLcLpsA8T zG)!x>pKMpt!lv3&KV!-um= zKCir6`bEL_LCFx4Z5bAFXW$g3Cq`?Q%)3q0r852XI*Der*JNuKUZ`C{cCuu8R8nkt z%pnF>R$uY8L+D!V{s^9>IC+bmt<05h**>49R*#vpM*4i0qRB2uPbg8{{s#9yC;Z18 zD7|4m<9qneQ84uX|J&f-g8a|nFKFt34@Bt{CU`v(SYbbn95Q67*)_Esl_;v291s=9 z+#2F2apZU4Tq=x+?V}CjwD(P=U~d<=mfEFuyPB`Ey82V9G#Sk8H_Ob_RnP3s?)S_3 zr%}Pb?;lt_)Nf>@zX~D~TBr;-LS<1I##8z`;0ZCvI_QbXNh8Iv)$LS=*gHr;}dgb=w5$3k2la1keIm|=7<-JD>)U%=Avl0Vj@+&vxn zt-)`vJxJr88D&!}2^{GPXc^nmRf#}nb$4MMkBA21GzB`-Or`-3lq^O^svO7Vs~FdM zv`NvzyG+0T!P8l_&8gH|pzE{N(gv_tgDU7SWeiI-iHC#0Ai%Ixn4&nt{5y3(GQs)i z&uA;~_0shP$0Wh0VooIeyC|lak__#KVJfxa7*mYmZ22@(<^W}FdKjd*U1CqSjNKW% z*z$5$=t^+;Ui=MoDW~A7;)Mj%ibX1_p4gu>RC}Z_pl`U*{_z@+HN?AF{_W z?M_X@o%w8fgFIJ$fIzBeK=v#*`mtY$HC3tqw7q^GCT!P$I%=2N4FY7j9nG8aIm$c9 
zeKTxVKN!UJ{#W)zxW|Q^K!3s;(*7Gbn;e@pQBCDS(I|Y0euK#dSQ_W^)sv5pa%<^o zyu}3d?Lx`)3-n5Sy9r#`I{+t6x%I%G(iewGbvor&I^{lhu-!#}*Q3^itvY(^UWXgvthH52zLy&T+B)Pw;5>4D6>74 zO_EBS)>l!zLTVkX@NDqyN2cXTwsUVao7$HcqV2%t$YzdAC&T)dwzExa3*kt9d(}al zA~M}=%2NVNUjZiO7c>04YH)sRelXJYpWSn^aC$|Ji|E13a^-v2MB!Nc*b+=KY7MCm zqIteKfNkONq}uM;PB?vvgQvfKLPMB8u5+Am=d#>g+o&Ysb>dX9EC8q?D$pJH!MTAqa=DS5$cb+;hEvjwVfF{4;M{5U&^_+r zvZdu_rildI!*|*A$TzJ&apQWV@p{!W`=?t(o0{?9y&vM)V)ycGSlI3`;ps(vf2PUq zX745#`cmT*ra7XECC0gKkpu2eyhFEUb?;4@X7weEnLjXj_F~?OzL1U1L0|s6M+kIhmi%`n5vvDALMagi4`wMc=JV{XiO+^ z?s9i7;GgrRW{Mx)d7rj)?(;|b-`iBNPqdwtt%32se@?w4<^KU&585_kZ=`Wy^oLu9 z?DQAh5z%q;UkP48jgMFHTf#mj?#z|=w= z(q6~17Vn}P)J3M?O)x))%a5+>TFW3No~TgP;f}K$#icBh;rSS+R|}l鯊%1Et zwk~hMkhq;MOw^Q5`7oC{CUUyTw9x>^%*FHx^qJw(LB+E0WBX@{Ghw;)6aA-KyYg8p z7XDveQOpEr;B4je@2~usI5BlFadedX^ma{b{ypd|RNYqo#~d*mj&y`^iojR}s%~vF z(H!u`yx68D1Tj(3(m;Q+Ma}s2n#;O~bcB1`lYk%Irx60&-nWIUBr2x&@}@76+*zJ5 ze&4?q8?m%L9c6h=J$WBzbiTf1Z-0Eb5$IZs>lvm$>1n_Mezp*qw_pr8<8$6f)5f<@ zyV#tzMCs51nTv_5ca`x`yfE5YA^*%O_H?;tWYdM_kHPubA%vy47i=9>Bq) zRQ&0UwLQHeswmB1yP)+BiR;S+Vc-5TX84KUA;8VY9}yEj0eESSO`7HQ4lO z4(CyA8y1G7_C;6kd4U3K-aNOK!sHE}KL_-^EDl(vB42P$2Km7$WGqNy=%fqB+ zSLdrlcbEH=T@W8V4(TgoXZ*G1_aq$K^@ek=TVhoKRjw;HyI&coln|uRr5mMOy2GXP zwr*F^Y|!Sjr2YQXX(Fp^*`Wk905K%$bd03R4(igl0&7IIm*#f`A!DCarW9$h$z`kYk9MjjqN&5-DsH@8xh63!fTNPxWsFQhNv z#|3RjnP$Thdb#Ys7M+v|>AHm0BVTw)EH}>x@_f4zca&3tXJhTZ8pO}aN?(dHo)44Z z_5j+YP=jMlFqwvf3lq!57-SAuRV2_gJ*wsR_!Y4Z(trO}0wmB9%f#jNDHPdQGHFR; zZXzS-$`;7DQ5vF~oSgP3bNV$6Z(rwo6W(U07b1n3UHqml>{=6&-4PALATsH@Bh^W? 
z)ob%oAPaiw{?9HfMzpGb)@Kys^J$CN{uf*HX?)z=g`J(uK1YO^8~s1(ZIbG%Et(|q z$D@_QqltVZu9Py4R0Ld8!U|#`5~^M=b>fnHthzKBRr=i+w@0Vr^l|W;=zFT#PJ?*a zbC}G#It}rQP^Ait^W&aa6B;+0gNvz4cWUMzpv(1gvfw-X4xJ2Sv;mt;zb2Tsn|kSS zo*U9N?I{=-;a-OybL4r;PolCfiaL=y@o9{%`>+&FI#D^uy#>)R@b^1ue&AKKwuI*` zx%+6r48EIX6nF4o;>)zhV_8(IEX})NGU6Vs(yslrx{5fII}o3SMHW7wGtK9oIO4OM&@@ECtXSICLcPXoS|{;=_yj>hh*%hP27yZwOmj4&Lh z*Nd@OMkd!aKReoqNOkp5cW*lC)&C$P?+H3*%8)6HcpBg&IhGP^77XPZpc%WKYLX$T zsSQ$|ntaVVOoRat$6lvZO(G-QM5s#N4j*|N_;8cc2v_k4n6zx9c1L4JL*83F-C1Cn zaJhd;>rHXB%%ZN=3_o3&Qd2YOxrK~&?1=UuN9QhL$~OY-Qyg&})#ez*8NpQW_*a&kD&ANjedxT0Ar z<6r{eaVz3`d~+N~vkMaV8{F?RBVemN(jD@S8qO~L{rUw#=2a$V(7rLE+kGUZ<%pdr z?$DP|Vg#gZ9S}w((O2NbxzQ^zTot=89!0^~hE{|c9q1hVzv0?YC5s42Yx($;hAp*E zyoGuRyphQY{Q2ee0Xx`1&lv(l-SeC$NEyS~8iil3_aNlnqF_G|;zt#F%1;J)jnPT& z@iU0S;wHJ2$f!juqEzPZeZkjcQ+Pa@eERSLKsWf=`{R@yv7AuRh&ALRTAy z8=g&nxsSJCe!QLchJ=}6|LshnXIK)SNd zRkJNiqHwKK{SO;N5m5wdL&qK`v|d?5<4!(FAsDxR>Ky#0#t$8XCMptvNo?|SY?d8b z`*8dVBlXTUanlh6n)!EHf2&PDG8sXNAt6~u-_1EjPI1|<=33T8 zEnA00E!`4Ave0d&VVh0e>)Dc}=FfAFxpsC1u9ATfQ`-Cu;mhc8Z>2;uyXtqpLb7(P zd2F9<3cXS} znMg?{&8_YFTGRQZEPU-XPq55%51}RJpw@LO_|)CFAt62-_!u_Uq$csc+7|3+TV_!h z+2a7Yh^5AA{q^m|=KSJL+w-EWDBc&I_I1vOr^}P8i?cKMhGy$CP0XKrQzCheG$}G# zuglf8*PAFO8%xop7KSwI8||liTaQ9NCAFarr~psQt)g*pC@9bORZ>m`_GA`_K@~&% zijH0z;T$fd;-Liw8%EKZas>BH8nYTqsK7F;>>@YsE=Rqo?_8}UO-S#|6~CAW0Oz1} z3F(1=+#wrBJh4H)9jTQ_$~@#9|Bc1Pd3rAIA_&vOpvvbgDJOM(yNPhJJq2%PCcMaI zrbe~toYzvkZYQ{ea(Wiyu#4WB#RRN%bMe=SOk!CbJZv^m?Flo5p{W8|0i3`hI3Np# zvCZqY%o258CI=SGb+A3yJe~JH^i{uU`#U#fvSC~rWTq+K`E%J@ zasU07&pB6A4w3b?d?q}2=0rA#SA7D`X+zg@&zm^iA*HVi z009#PUH<%lk4z~p^l0S{lCJk1Uxi=F4e_DwlfHA`X`rv(|JqWKAA5nH+u4Da+E_p+ zVmH@lg^n4ixs~*@gm_dgQ&eDmE1mnw5wBz9Yg?QdZwF|an67Xd*x!He)Gc8&2!urh z4_uXzbYz-aX)X1>&iUjGp;P1u8&7TID0bTH-jCL&Xk8b&;;6p2op_=y^m@Nq*0{#o!!A;wNAFG@0%Z9rHo zcJs?Th>Ny6+hI`+1XoU*ED$Yf@9f91m9Y=#N(HJP^Y@ZEYR6I?oM{>&Wq4|v0IB(p zqX#Z<_3X(&{H+{3Tr|sFy}~=bv+l=P;|sBz$wk-n^R`G3p0(p>p=5ahpaD7>r|>pm 
zv;V`_IR@tvZreIuv2EM7ZQHhO+qUgw#kOs%*ekY^n|=1#x9&c;Ro&I~{rG-#_3ZB1 z?|9}IFdbP}^DneP*T-JaoYHt~r@EfvnPE5EKUwIxjPbsr$% zfWW83pgWST7*B(o=kmo)74$8UU)v0{@4DI+ci&%=#90}!CZz|rnH+Mz=HN~97G3~@ z;v5(9_2%eca(9iu@J@aqaMS6*$TMw!S>H(b z4(*B!|H|8&EuB%mITr~O?vVEf%(Gr)6E=>H~1VR z&1YOXluJSG1!?TnT)_*YmJ*o_Q@om~(GdrhI{$Fsx_zrkupc#y{DK1WOUR>tk>ZE) ziOLoBkhZZ?0Uf}cm>GsA>Rd6V8@JF)J*EQlQ<=JD@m<)hyElXR0`pTku*3MU`HJn| zIf7$)RlK^pW-$87U;431;Ye4Ie+l~_B3*bH1>*yKzn23cH0u(i5pXV! z4K?{3oF7ZavmmtTq((wtml)m6i)8X6ot_mrE-QJCW}Yn!(3~aUHYG=^fA<^~`e3yc z-NWTb{gR;DOUcK#zPbN^D*e=2eR^_!(!RKkiwMW@@yYtEoOp4XjOGgzi`;=8 zi3`Ccw1%L*y(FDj=C7Ro-V?q)-%p?Ob2ZElu`eZ99n14-ZkEV#y5C+{Pq87Gu3&>g zFy~Wk7^6v*)4pF3@F@rE__k3ikx(hzN3@e*^0=KNA6|jC^B5nf(XaoQaZN?Xi}Rn3 z$8&m*KmWvPaUQ(V<#J+S&zO|8P-#!f%7G+n_%sXp9=J%Z4&9OkWXeuZN}ssgQ#Tcj z8p6ErJQJWZ+fXLCco=RN8D{W%+*kko*2-LEb))xcHwNl~Xmir>kmAxW?eW50Osw3# zki8Fl$#fvw*7rqd?%E?}ZX4`c5-R&w!Y0#EBbelVXSng+kUfeUiqofPehl}$ormli zg%r)}?%=?_pHb9`Cq9Z|B`L8b>(!+8HSX?`5+5mm81AFXfnAt1*R3F z%b2RPIacKAddx%JfQ8l{3U|vK@W7KB$CdLqn@wP^?azRks@x8z59#$Q*7q!KilY-P zHUbs(IFYRGG1{~@RF;Lqyho$~7^hNC`NL3kn^Td%A7dRgr_&`2k=t+}D-o9&C!y^? 
z6MsQ=tc3g0xkK(O%DzR9nbNB(r@L;1zQrs8mzx&4dz}?3KNYozOW5;=w18U6$G4U2 z#2^qRLT*Mo4bV1Oeo1PKQ2WQS2Y-hv&S|C7`xh6=Pj7MNLC5K-zokZ67S)C;(F0Dd zloDK2_o1$Fmza>EMj3X9je7e%Q`$39Dk~GoOj89-6q9|_WJlSl!!+*{R=tGp z8u|MuSwm^t7K^nUe+^0G3dkGZr3@(X+TL5eah)K^Tn zXEtHmR9UIaEYgD5Nhh(s*fcG_lh-mfy5iUF3xxpRZ0q3nZ=1qAtUa?(LnT9I&~uxX z`pV?+=|-Gl(kz?w!zIieXT}o}7@`QO>;u$Z!QB${a08_bW0_o@&9cjJUXzVyNGCm8 zm=W+$H!;_Kzp6WQqxUI;JlPY&`V}9C$8HZ^m?NvI*JT@~BM=()T()Ii#+*$y@lTZBkmMMda>7s#O(1YZR+zTG@&}!EXFG{ zEWPSDI5bFi;NT>Yj*FjH((=oe%t%xYmE~AGaOc4#9K_XsVpl<4SP@E!TgC0qpe1oi zNpxU2b0(lEMcoibQ-G^cxO?ySVW26HoBNa;n0}CWL*{k)oBu1>F18X061$SP{Gu67 z-v-Fa=Fl^u3lnGY^o5v)Bux}bNZ~ z5pL+7F_Esoun8^5>z8NFoIdb$sNS&xT8_|`GTe8zSXQzs4r^g0kZjg(b0bJvz`g<70u9Z3fQILX1Lj@;@+##bP|FAOl)U^9U>0rx zGi)M1(Hce)LAvQO-pW!MN$;#ZMX?VE(22lTlJrk#pB0FJNqVwC+*%${Gt#r_tH9I_ z;+#)#8cWAl?d@R+O+}@1A^hAR1s3UcW{G+>;X4utD2d9X(jF555}!TVN-hByV6t+A zdFR^aE@GNNgSxxixS2p=on4(+*+f<8xrwAObC)D5)4!z7)}mTpb7&ofF3u&9&wPS< zB62WHLGMhmrmOAgmJ+|c>qEWTD#jd~lHNgT0?t-p{T=~#EMcB| z=AoDKOL+qXCfk~F)-Rv**V}}gWFl>liXOl7Uec_8v)(S#av99PX1sQIVZ9eNLkhq$ zt|qu0b?GW_uo}TbU8!jYn8iJeIP)r@;!Ze_7mj{AUV$GEz6bDSDO=D!&C9!M@*S2! 
zfGyA|EPlXGMjkH6x7OMF?gKL7{GvGfED=Jte^p=91FpCu)#{whAMw`vSLa`K#atdN zThnL+7!ZNmP{rc=Z>%$meH;Qi1=m1E3Lq2D_O1-X5C;!I0L>zur@tPAC9*7Jeh)`;eec}1`nkRP(%iv-`N zZ@ip-g|7l6Hz%j%gcAM}6-nrC8oA$BkOTz^?dakvX?`^=ZkYh%vUE z9+&)K1UTK=ahYiaNn&G5nHUY5niLGus@p5E2@RwZufRvF{@$hW{;{3QhjvEHMvduO z#Wf-@oYU4ht?#uP{N3utVzV49mEc9>*TV_W2TVC`6+oI)zAjy$KJrr=*q##&kobiQ z1vNbya&OVjK`2pdRrM?LuK6BgrLN7H_3m z!qpNKg~87XgCwb#I=Q&0rI*l$wM!qTkXrx1ko5q-f;=R2fImRMwt5Qs{P*p^z@9ex z`2#v(qE&F%MXlHpdO#QEZyZftn4f05ab^f2vjxuFaat2}jke{j?5GrF=WYBR?gS(^ z9SBiNi}anzBDBRc+QqizTTQuJrzm^bNA~A{j%ugXP7McZqJ}65l10({wk++$=e8O{ zxWjG!Qp#5OmI#XRQQM?n6?1ztl6^D40hDJr?4$Wc&O_{*OfMfxe)V0=e{|N?J#fgE>j9jAajze$iN!*yeF%jJU#G1c@@rm zolGW!j?W6Q8pP=lkctNFdfgUMg92wlM4E$aks1??M$~WQfzzzXtS)wKrr2sJeCN4X zY(X^H_c^PzfcO8Bq(Q*p4c_v@F$Y8cHLrH$`pJ2}=#*8%JYdqsqnGqEdBQMpl!Ot04tUGSXTQdsX&GDtjbWD=prcCT9(+ z&UM%lW%Q3yrl1yiYs;LxzIy>2G}EPY6|sBhL&X&RAQrSAV4Tlh2nITR?{6xO9ujGu zr*)^E`>o!c=gT*_@6S&>0POxcXYNQd&HMw6<|#{eSute2C3{&h?Ah|cw56-AP^f8l zT^kvZY$YiH8j)sk7_=;gx)vx-PW`hbSBXJGCTkpt;ap(}G2GY=2bbjABU5)ty%G#x zAi07{Bjhv}>OD#5zh#$0w;-vvC@^}F! 
z#X$@)zIs1L^E;2xDAwEjaXhTBw2<{&JkF*`;c3<1U@A4MaLPe{M5DGGkL}#{cHL%* zYMG+-Fm0#qzPL#V)TvQVI|?_M>=zVJr9>(6ib*#z8q@mYKXDP`k&A4A};xMK0h=yrMp~JW{L?mE~ph&1Y1a#4%SO)@{ zK2juwynUOC)U*hVlJU17%llUxAJFuKZh3K0gU`aP)pc~bE~mM!i1mi!~LTf>1Wp< zuG+ahp^gH8g8-M$u{HUWh0m^9Rg@cQ{&DAO{PTMudV6c?ka7+AO& z746QylZ&Oj`1aqfu?l&zGtJnpEQOt;OAFq19MXTcI~`ZcoZmyMrIKDFRIDi`FH)w; z8+*8tdevMDv*VtQi|e}CnB_JWs>fhLOH-+Os2Lh!&)Oh2utl{*AwR)QVLS49iTp{6 z;|172Jl!Ml17unF+pd+Ff@jIE-{Oxv)5|pOm@CkHW?{l}b@1>Pe!l}VccX#xp@xgJ zyE<&ep$=*vT=}7vtvif0B?9xw_3Gej7mN*dOHdQPtW5kA5_zGD zpA4tV2*0E^OUimSsV#?Tg#oiQ>%4D@1F5@AHwT8Kgen$bSMHD3sXCkq8^(uo7CWk`mT zuslYq`6Yz;L%wJh$3l1%SZv#QnG3=NZ=BK4yzk#HAPbqXa92;3K5?0kn4TQ`%E%X} z&>Lbt!!QclYKd6+J7Nl@xv!uD%)*bY-;p`y^ZCC<%LEHUi$l5biu!sT3TGGSTPA21 zT8@B&a0lJHVn1I$I3I1I{W9fJAYc+8 zVj8>HvD}&O`TqU2AAb={?eT;0hyL(R{|h23=4fDSZKC32;wWxsVj`P z3J3{M$PwdH!ro*Cn!D&=jnFR>BNGR<<|I8CI@+@658Dy(lhqbhXfPTVecY@L8%`3Q z1Fux2w?2C3th60jI~%OC9BtpNF$QPqcG+Pz96qZJ71_`0o0w_q7|h&O>`6U+^BA&5 zXd5Zp1Xkw~>M%RixTm&OqpNl8Q+ue=92Op_>T~_9UON?ZM2c0aGm=^A4ejrXj3dV9 zhh_bCt-b9`uOX#cFLj!vhZ#lS8Tc47OH>*)y#{O9?AT~KR9LntM|#l#Dlm^8{nZdk zjMl#>ZM%#^nK2TPzLcKxqx24P7R1FPlBy7LSBrRvx>fE$9AJ;7{PQm~^LBX^k#6Zq zw*Z(zJC|`!6_)EFR}8|n8&&Rbj8y028~P~sFXBFRt+tmqH-S3<%N;C&WGH!f3{7cm zy_fCAb9@HqaXa1Y5vFbxWf%#zg6SI$C+Uz5=CTO}e|2fjWkZ;Dx|84Ow~bkI=LW+U zuq;KSv9VMboRvs9)}2PAO|b(JCEC_A0wq{uEj|3x@}*=bOd zwr{TgeCGG>HT<@Zeq8y}vTpwDg#UBvD)BEs@1KP$^3$sh&_joQPn{hjBXmLPJ{tC) z*HS`*2+VtJO{|e$mM^|qv1R*8i(m1`%)}g=SU#T#0KlTM2RSvYUc1fP+va|4;5}Bfz98UvDCpq7}+SMV&;nX zQw~N6qOX{P55{#LQkrZk(e5YGzr|(B;Q;ju;2a`q+S9bsEH@i1{_Y0;hWYn1-79jl z5c&bytD*k)GqrVcHn6t-7kinadiD>B{Tl`ZY@`g|b~pvHh5!gKP4({rp?D0aFd_cN zhHRo4dd5^S6ViN(>(28qZT6E>??aRhc($kP`>@<+lIKS5HdhjVU;>f7<4))E*5|g{ z&d1}D|vpuV^eRj5j|xx9nwaCxXFG?Qbjn~_WSy=N}P0W>MP zG-F%70lX5Xr$a)2i6?i|iMyM|;Jtf*hO?=Jxj12oz&>P=1#h~lf%#fc73M2_(SUM- zf&qnjS80|_Y0lDgl&I?*eMumUklLe_=Td!9G@eR*tcPOgIShJipp3{A10u(4eT~DY zHezEj8V+7m!knn7)W!-5QI3=IvC^as5+TW1@Ern@yX| z7Nn~xVx&fGSr+L%4iohtS3w^{-H1A_5=r&x8}R!YZvp<2T^YFvj8G_vm}5q;^UOJf 
ztl=X3iL;;^^a#`t{Ae-%5Oq{?M#s6Npj+L(n-*LMI-yMR{)qki!~{5z{&`-iL}lgW zxo+tnvICK=lImjV$Z|O_cYj_PlEYCzu-XBz&XC-JVxUh9;6*z4fuBG+H{voCC;`~GYV|hj%j_&I zDZCj>Q_0RCwFauYoVMiUSB+*Mx`tg)bWmM^SwMA+?lBg12QUF_x2b)b?qb88K-YUd z0dO}3k#QirBV<5%jL$#wlf!60dizu;tsp(7XLdI=eQs?P`tOZYMjVq&jE)qK*6B^$ zBe>VvH5TO>s>izhwJJ$<`a8fakTL!yM^Zfr2hV9`f}}VVUXK39p@G|xYRz{fTI+Yq z20d=)iwjuG9RB$%$^&8#(c0_j0t_C~^|n+c`Apu|x7~;#cS-s=X1|C*YxX3ailhg_|0`g!E&GZJEr?bh#Tpb8siR=JxWKc{#w7g zWznLwi;zLFmM1g8V5-P#RsM@iX>TK$xsWuujcsVR^7TQ@!+vCD<>Bk9tdCo7Mzgq5 zv8d>dK9x8C@Qoh01u@3h0X_`SZluTb@5o;{4{{eF!-4405x8X7hewZWpz z2qEi4UTiXTvsa(0X7kQH{3VMF>W|6;6iTrrYD2fMggFA&-CBEfSqPlQDxqsa>{e2M z(R5PJ7uOooFc|9GU0ELA%m4&4Ja#cQpNw8i8ACAoK6?-px+oBl_yKmenZut#Xumjz zk8p^OV2KY&?5MUwGrBOo?ki`Sxo#?-Q4gw*Sh0k`@ zFTaYK2;}%Zk-68`#5DXU$2#=%YL#S&MTN8bF+!J2VT6x^XBci6O)Q#JfW{YMz) zOBM>t2rSj)n#0a3cjvu}r|k3od6W(SN}V-cL?bi*Iz-8uOcCcsX0L>ZXjLqk zZu2uHq5B|Kt>e+=pPKu=1P@1r9WLgYFq_TNV1p9pu0erHGd!+bBp!qGi+~4A(RsYN@CyXNrC&hxGmW)u5m35OmWwX`I+0yByglO`}HC4nGE^_HUs^&A(uaM zKPj^=qI{&ayOq#z=p&pnx@@k&I1JI>cttJcu@Ihljt?6p^6{|ds`0MoQwp+I{3l6` zB<9S((RpLG^>=Kic`1LnhpW2=Gu!x`m~=y;A`Qk!-w`IN;S8S930#vBVMv2vCKi}u z6<-VPrU0AnE&vzwV(CFC0gnZYcpa-l5T0ZS$P6(?9AM;`Aj~XDvt;Jua=jIgF=Fm? 
zdp=M$>`phx%+Gu};;-&7T|B1AcC#L4@mW5SV_^1BRbo6;2PWe$r+npRV`yc;T1mo& z+~_?7rA+(Um&o@Tddl zL_hxvWk~a)yY}%j`Y+200D%9$bWHy&;(yj{jpi?Rtz{J66ANw)UyPOm;t6FzY3$hx zcn)Ir79nhFvNa7^a{SHN7XH*|Vlsx`CddPnA&Qvh8aNhEA;mPVv;Ah=k<*u!Zq^7 z<=xs*iQTQOMMcg|(NA_auh@x`3#_LFt=)}%SQppP{E>mu_LgquAWvh<>L7tf9+~rO znwUDS52u)OtY<~!d$;m9+87aO+&`#2ICl@Y>&F{jI=H(K+@3M1$rr=*H^dye#~TyD z!){#Pyfn+|ugUu}G;a~!&&0aqQ59U@UT3|_JuBlYUpT$2+11;}JBJ`{+lQN9T@QFY z5+`t;6(TS0F?OlBTE!@7D`8#URDNqx2t6`GZ{ZgXeS@v%-eJzZOHz18aS|svxII$a zZeFjrJ*$IwX$f-Rzr_G>xbu@euGl)B7pC&S+CmDJBg$BoV~jxSO#>y z33`bupN#LDoW0feZe0%q8un0rYN|eRAnwDHQ6e_)xBTbtoZtTA=Fvk){q}9Os~6mQ zKB80VI_&6iSq`LnK7*kfHZoeX6?WE}8yjuDn=2#JG$+;-TOA1%^=DnXx%w{b=w}tS zQbU3XxtOI8E(!%`64r2`zog;5<0b4i)xBmGP^jiDZ2%HNSxIf3@wKs~uk4%3Mxz;~ zts_S~E4>W+YwI<-*-$U8*^HKDEa8oLbmqGg?3vewnaNg%Mm)W=)lcC_J+1ov^u*N3 zXJ?!BrH-+wGYziJq2Y#vyry6Z>NPgkEk+Ke`^DvNRdb>Q2Nlr#v%O@<5hbflI6EKE z9dWc0-ORk^T}jP!nkJ1imyjdVX@GrjOs%cpgA8-c&FH&$(4od#x6Y&=LiJZPINVyW z0snY$8JW@>tc2}DlrD3StQmA0Twck~@>8dSix9CyQOALcREdxoM$Sw*l!}bXKq9&r zysMWR@%OY24@e`?+#xV2bk{T^C_xSo8v2ZI=lBI*l{RciPwuE>L5@uhz@{!l)rtVlWC>)6(G)1~n=Q|S!{E9~6*fdpa*n z!()-8EpTdj=zr_Lswi;#{TxbtH$8*G=UM`I+icz7sr_SdnHXrv=?iEOF1UL+*6O;% zPw>t^kbW9X@oEXx<97%lBm-9?O_7L!DeD)Me#rwE54t~UBu9VZ zl_I1tBB~>jm@bw0Aljz8! 
zXBB6ATG6iByKIxs!qr%pz%wgqbg(l{65DP4#v(vqhhL{0b#0C8mq`bnqZ1OwFV z7mlZZJFMACm>h9v^2J9+^_zc1=JjL#qM5ZHaThH&n zXPTsR8(+)cj&>Un{6v*z?@VTLr{TmZ@-fY%*o2G}*G}#!bmqpoo*Ay@U!JI^Q@7gj;Kg-HIrLj4}#ec4~D2~X6vo;ghep-@&yOivYP zC19L0D`jjKy1Yi-SGPAn94(768Tcf$urAf{)1)9W58P`6MA{YG%O?|07!g9(b`8PXG1B1Sh0?HQmeJtP0M$O$hI z{5G`&9XzYhh|y@qsF1GnHN|~^ru~HVf#)lOTSrv=S@DyR$UKQk zjdEPFDz{uHM&UM;=mG!xKvp;xAGHOBo~>_=WFTmh$chpC7c`~7?36h)7$fF~Ii}8q zF|YXxH-Z?d+Q+27Rs3X9S&K3N+)OBxMHn1u(vlrUC6ckBY@@jl+mgr#KQUKo#VeFm zFwNYgv0<%~Wn}KeLeD9e1$S>jhOq&(e*I@L<=I5b(?G(zpqI*WBqf|Zge0&aoDUsC zngMRA_Kt0>La+Erl=Uv_J^p(z=!?XHpenzn$%EA`JIq#yYF?JLDMYiPfM(&Csr#f{ zdd+LJL1by?xz|D8+(fgzRs~(N1k9DSyK@LJygwaYX8dZl0W!I&c^K?7)z{2is;OkE zd$VK-(uH#AUaZrp=1z;O*n=b?QJkxu`Xsw&7yrX0?(CX=I-C#T;yi8a<{E~?vr3W> zQrpPqOW2M+AnZ&p{hqmHZU-;Q(7?- zP8L|Q0RM~sB0w1w53f&Kd*y}ofx@c z5Y6B8qGel+uT1JMot$nT1!Tim6{>oZzJXdyA+4euOLME?5Fd_85Uk%#E*ln%y{u8Q z$|?|R@Hpb~yTVK-Yr_S#%NUy7EBfYGAg>b({J|5b+j-PBpPy$Ns`PaJin4JdRfOaS zE|<HjH%NuJgsd2wOlv>~y=np%=2)$M9LS|>P)zJ+Fei5vYo_N~B0XCn+GM76 z)Xz3tg*FRVFgIl9zpESgdpWAavvVViGlU8|UFY{{gVJskg*I!ZjWyk~OW-Td4(mZ6 zB&SQreAAMqwp}rjy`HsG({l2&q5Y52<@AULVAu~rWI$UbFuZs>Sc*x+XI<+ez%$U)|a^unjpiW0l0 zj1!K0(b6$8LOjzRqQ~K&dfbMIE=TF}XFAi)$+h}5SD3lo z%%Qd>p9se=VtQG{kQ;N`sI)G^u|DN#7{aoEd zkksYP%_X$Rq08);-s6o>CGJ<}v`qs%eYf+J%DQ^2k68C%nvikRsN?$ap--f+vCS`K z#&~)f7!N^;sdUXu54gl3L=LN>FB^tuK=y2e#|hWiWUls__n@L|>xH{%8lIJTd5`w? 
zSwZbnS;W~DawT4OwSJVdAylbY+u5S+ZH{4hAi2&}Iv~W(UvHg(1GTZRPz`@{SOqzy z(8g&Dz=$PfRV=6FgxN~zo+G8OoPI&d-thcGVR*_^(R8COTM@bq?fDwY{}WhsQS1AK zF6R1t8!RdFmfocpJ6?9Yv~;WYi~XPgs(|>{5})j!AR!voO7y9&cMPo#80A(`za@t>cx<0;qxM@S*m(jYP)dMXr*?q0E`oL;12}VAep179uEr8c<=D zr5?A*C{eJ`z9Ee;E$8)MECqatHkbHH z&Y+ho0B$31MIB-xm&;xyaFCtg<{m~M-QDbY)fQ>Q*Xibb~8ytxZQ?QMf9!%cV zU0_X1@b4d+Pg#R!`OJ~DOrQz3@cpiGy~XSKjZQQ|^4J1puvwKeScrH8o{bscBsowomu z^f12kTvje`yEI3eEXDHJ6L+O{Jv$HVj%IKb|J{IvD*l6IG8WUgDJ*UGz z3!C%>?=dlfSJ>4U88)V+`U-!9r^@AxJBx8R;)J4Fn@`~k>8>v0M9xp90OJElWP&R5 zM#v*vtT}*Gm1^)Bv!s72T3PB0yVIjJW)H7a)ilkAvoaH?)jjb`MP>2z{%Y?}83 zUIwBKn`-MSg)=?R)1Q0z3b>dHE^)D8LFs}6ASG1|daDly_^lOSy&zIIhm*HXm1?VS=_iacG);_I9c zUQH1>i#*?oPIwBMJkzi_*>HoUe}_4o>2(SHWzqQ=;TyhAHS;Enr7!#8;sdlty&(>d zl%5cjri8`2X^Ds`jnw7>A`X|bl=U8n+3LKLy(1dAu8`g@9=5iw$R0qk)w8Vh_Dt^U zIglK}sn^)W7aB(Q>HvrX=rxB z+*L)3DiqpQ_%~|m=44LcD4-bxO3OO*LPjsh%p(k?&jvLp0py57oMH|*IMa(<|{m1(0S|x)?R-mqJ=I;_YUZA>J z62v*eSK;5w!h8J+6Z2~oyGdZ68waWfy09?4fU&m7%u~zi?YPHPgK6LDwphgaYu%0j zurtw)AYOpYKgHBrkX189mlJ`q)w-f|6>IER{5Lk97%P~a-JyCRFjejW@L>n4vt6#hq;!|m;hNE||LK3nw1{bJOy+eBJjK=QqNjI;Q6;Rp5 z&035pZDUZ#%Oa;&_7x0T<7!RW`#YBOj}F380Bq?MjjEhrvlCATPdkCTTl+2efTX$k zH&0zR1n^`C3ef~^sXzJK-)52(T}uTG%OF8yDhT76L~|^+hZ2hiSM*QA9*D5odI1>& z9kV9jC~twA5MwyOx(lsGD_ggYmztXPD`2=_V|ks_FOx!_J8!zM zTzh^cc+=VNZ&(OdN=y4Juw)@8-85lwf_#VMN!Ed(eQiRiLB2^2e`4dp286h@v@`O%_b)Y~A; zv}r6U?zs&@uD_+(_4bwoy7*uozNvp?bXFoB8?l8yG0qsm1JYzIvB_OH4_2G*IIOwT zVl%HX1562vLVcxM_RG*~w_`FbIc!(T=3>r528#%mwwMK}uEhJ()3MEby zQQjzqjWkwfI~;Fuj(Lj=Ug0y`>~C7`w&wzjK(rPw+Hpd~EvQ-ufQOiB4OMpyUKJhw zqEt~jle9d7S~LI~$6Z->J~QJ{Vdn3!c}g9}*KG^Kzr^(7VI5Gk(mHLL{itj_hG?&K4Ws0+T4gLfi3eu$N=`s36geNC?c zm!~}vG6lx9Uf^5M;bWntF<-{p^bruy~f?sk9 zcETAPQZLoJ8JzMMg<-=ju4keY@SY%Wo?u9Gx=j&dfa6LIAB|IrbORLV1-H==Z1zCM zeZcOYpm5>U2fU7V*h;%n`8 zN95QhfD994={1*<2vKLCNF)feKOGk`R#K~G=;rfq}|)s20&MCa65 zUM?xF5!&e0lF%|U!#rD@I{~OsS_?=;s_MQ_b_s=PuWdC)q|UQ&ea)DMRh5>fpQjXe z%9#*x=7{iRCtBKT#H>#v%>77|{4_slZ)XCY{s3j_r{tdpvb#|r|sbS^dU1x70$eJMU!h{Y7Kd{dl}9&vxQl6Jt1a` 
zHQZrWyY0?!vqf@u-fxU_@+}u(%Wm>0I#KP48tiAPYY!TdW(o|KtVI|EUB9V`CBBNaBLVih7+yMVF|GSoIQD0Jfb{ z!OXq;(>Z?O`1gap(L~bUcp>Lc@Jl-})^=6P%<~~9ywY=$iu8pJ0m*hOPzr~q`23eX zgbs;VOxxENe0UMVeN*>uCn9Gk!4siN-e>x)pIKAbQz!G)TcqIJ0`JBBaX>1-4_XO_-HCS^vr2vjv#7KltDZdyQ{tlWh4$Gm zB>|O1cBDC)yG(sbnc*@w6e%e}r*|IhpXckx&;sQCwGdKH+3oSG-2)Bf#x`@<4ETAr z0My%7RFh6ZLiZ_;X6Mu1YmXx7C$lSZ^}1h;j`EZd6@%JNUe=btBE z%s=Xmo1Ps?8G`}9+6>iaB8bgjUdXT?=trMu|4yLX^m0Dg{m7rpKNJey|EwHI+nN1e zL^>qN%5Fg)dGs4DO~uwIdXImN)QJ*Jhpj7$fq_^`{3fwpztL@WBB}OwQ#Epo-mqMO zsM$UgpFiG&d#)lzEQ{3Q;)&zTw;SzGOah-Dpm{!q7<8*)Ti_;xvV2TYXa}=faXZy? z3y?~GY@kl)>G&EvEijk9y1S`*=zBJSB1iet>0;x1Ai)*`^{pj0JMs)KAM=@UyOGtO z3y0BouW$N&TnwU6!%zS%nIrnANvZF&vB1~P5_d`x-giHuG zPJ;>XkVoghm#kZXRf>qxxEix;2;D1CC~NrbO6NBX!`&_$iXwP~P*c($EVV|669kDO zKoTLZNF4Cskh!Jz5ga9uZ`3o%7Pv`d^;a=cXI|>y;zC3rYPFLQkF*nv(r>SQvD*## z(Vo%^9g`%XwS0t#94zPq;mYGLKu4LU3;txF26?V~A0xZbU4Lmy`)>SoQX^m7fd^*E z+%{R4eN!rIk~K)M&UEzxp9dbY;_I^c} zOc{wlIrN_P(PPqi51k_$>Lt|X6A^|CGYgKAmoI#Li?;Wq%q~q*L7ehZkUrMxW67Jl zhsb~+U?33QS>eqyN{(odAkbopo=Q$Az?L+NZW>j;#~@wCDX?=L5SI|OxI~7!Pli;e zELMFcZtJY3!|=Gr2L4>z8yQ-{To>(f80*#;6`4IAiqUw`=Pg$%C?#1 z_g@hIGerILSU>=P>z{gM|DS91A4cT@PEIB^hSop!uhMo#2G;+tQSpDO_6nOnPWSLU zS;a9m^DFMXR4?*X=}d7l;nXuHk&0|m`NQn%d?8|Ab3A9l9Jh5s120ibWBdB z$5YwsK3;wvp!Kn@)Qae{ef`0#NwlRpQ}k^r>yos_Ne1;xyKLO?4)t_G4eK~wkUS2A&@_;)K0-03XGBzU+5f+uMDxC z(s8!8!RvdC#@`~fx$r)TKdLD6fWEVdEYtV#{ncT-ZMX~eI#UeQ-+H(Z43vVn%Yj9X zLdu9>o%wnWdvzA-#d6Z~vzj-}V3FQ5;axDIZ;i(95IIU=GQ4WuU{tl-{gk!5{l4_d zvvb&uE{%!iFwpymz{wh?bKr1*qzeZb5f6e6m_ozRF&zux2mlK=v_(_s^R6b5lu?_W4W3#<$zeG~Pd)^!4tzhs}-Sx$FJP>)ZGF(hVTH|C3(U zs0PO&*h_ zNA-&qZpTP$$LtIgfiCn07}XDbK#HIXdmv8zdz4TY;ifNIH-0jy(gMSByG2EF~Th#eb_TueZC` zE?3I>UTMpKQ})=C;6p!?G)M6w^u*A57bD?2X`m3X^6;&4%i_m(uGJ3Z5h`nwxM<)H z$I5m?wN>O~8`BGnZ=y^p6;0+%_0K}Dcg|K;+fEi|qoBqvHj(M&aHGqNF48~XqhtU? 
z^ogwBzRlOfpAJ+Rw7IED8lRbTdBdyEK$gPUpUG}j-M42xDj_&qEAQEtbs>D#dRd7Y z<&TpSZ(quQDHiCFn&0xsrz~4`4tz!CdL8m~HxZM_agu@IrBpyeL1Ft}V$HX_ZqDPm z-f89)pjuEzGdq-PRu`b1m+qBGY{zr_>{6Ss>F|xHZlJj9dt5HD$u`1*WZe)qEIuDSR)%z+|n zatVlhQ?$w#XRS7xUrFE;Y8vMGhQS5*T{ZnY=q1P?w5g$OKJ#M&e??tAmPWHMj3xhS ziGxapy?kn@$~2%ZY;M8Bc@%$pkl%Rvj!?o%agBvpQ-Q61n9kznC4ttrRNQ4%GFR5u zyv%Yo9~yxQJWJSfj z?#HY$y=O~F|2pZs22pu|_&Ajd+D(Mt!nPUG{|1nlvP`=R#kKH zO*s$r_%ss5h1YO7k0bHJ2CXN)Yd6CHn~W!R=SqkWe=&nAZu(Q1G!xgcUilM@YVei@2@a`8he z9@pM`)VB*=e7-MWgLlXlc)t;fF&-AwM{E-EX}pViFn0I0CNw2bNEnN2dj!^4(^zS3 zobUm1uQnpqk_4q{pl*n06=TfK_C>UgurKFjRXsK_LEn};=79`TB12tv6KzwSu*-C8 z;=~ohDLZylHQ|Mpx-?yql>|e=vI1Z!epyUpAcDCp4T|*RV&X`Q$0ogNwy6mFALo^@ z9=&(9txO8V@E!@6^(W0{*~CT>+-MA~vnJULBxCTUW>X5>r7*eXYUT0B6+w@lzw%n> z_VjJ<2qf|(d6jYq2(x$(ZDf!yVkfnbvNmb5c|hhZ^2TV_LBz`9w!e_V*W_(MiA7|= z&EeIIkw*+$Xd!)j8<@_<}A5;~A_>3JT*kX^@}cDoLd>Qj<`Se^wdUa(j0dp+Tl8EptwBm{9OGsdFEq zM`!pjf(Lm(`$e3FLOjqA5LnN5o!}z{ zNf}rJuZh@yUtq&ErjHeGzX4(!luV!jB&;FAP|!R_QHYw#^Z1LwTePAKJ6X&IDNO#; z)#I@Xnnzyij~C@UH~X51JCgQeF0&hTXnuoElz#m{heZRexWc0k4<>0+ClX7%0 zEBqCCld1tD9Zwkr4{?Nor19#E5-YKfB8d?qgR82-Ow2^AuNevly2*tHA|sK!ybYkX zm-sLQH72P&{vEAW6+z~O5d0qd=xW~rua~5a?ymYFSD@8&gV)E5@RNNBAj^C99+Z5Z zR@Pq55mbCQbz+Mn$d_CMW<-+?TU960agEk1J<>d>0K=pF19yN))a~4>m^G&tc*xR+yMD*S=yip-q=H zIlredHpsJV8H(32@Zxc@bX6a21dUV95Th--8pE6C&3F>pk=yv$yd6@Haw;$v4+Fcb zRwn{Qo@0`7aPa2LQOP}j9v>sjOo5Kqvn|`FLizX zB+@-u4Lw|jsvz{p^>n8Vo8H2peIqJJnMN}A)q6%$Tmig7eu^}K2 zrh$X?T|ZMsoh{6pdw1G$_T<`Ds-G=jc;qcGdK4{?dN2-XxjDNbb(7pk|3JUVCU4y; z)?LXR>f+AAu)JEiti_Zy#z5{RgsC}R(@jl%9YZ>zu~hKQ*AxbvhC378-I@{~#%Y`Z zy=a=9YpewPIC+gkEUUwtUL7|RU7=!^Aa}Mk^6uxOgRGA#JXjWLsjFUnix|Mau{hDT z7mn*z1m5g`vP(#tjT0Zy4eAY(br&!RiiXE=ZI!{sE1#^#%x^Z7t1U)b<;%Y}Q9=5v z;wpDCEZ@OE36TWT=|gxigT@VaW9BvHS05;_P(#s z8zI4XFQys}q)<`tkX$WnSarn{3e!s}4(J!=Yf>+Y>cP3f;vr63f2{|S^`_pWc)^5_!R z*(x-fuBxL51@xe!lnDBKi}Br$c$BMZ3%f2Sa6kLabiBS{pq*yj;q|k(86x`PiC{p6 z_bxCW{>Q2BA8~Ggz&0jkrcU+-$ANBsOop*ms>34K9lNYil@}jC;?cYP(m^P}nR6FV 
zk(M%48Z&%2Rx$A&FhOEirEhY0(dn;-k(qkTU)sFQ`+-ih+s@A8g?r8Pw+}2;35WYf zi}VO`jS`p(tc)$X$a>-#WXoW!phhatC*$}|rk>|wUU71eUJG^$c6_jwX?iSHM@6__ zvV|6%U*$sSXJu9SX?2%M^kK|}a2QJ8AhF{fuXrHZxXsI~O zGKX45!K7p*MCPEQ=gp?eu&#AW*pR{lhQR##P_*{c_DjMGL|3T3-bSJ(o$|M{ytU}> zAV>wq*uE*qFo9KvnA^@juy{x<-u*#2NvkV={Ly}ysKYB-k`K3@K#^S1Bb$8Y#0L0# z`6IkSG&|Z$ODy|VLS+y5pFJx&8tvPmMd8c9FhCyiU8~k6FwkakUd^(_ml8`rnl>JS zZV){9G*)xBqPz^LDqRwyS6w86#D^~xP4($150M)SOZRe9sn=>V#aG0Iy(_^YcPpIz8QYM-#s+n% z@Jd?xQq?Xk6=<3xSY7XYP$$yd&Spu{A#uafiIfy8gRC`o0nk{ezEDjb=q_qRAlR1d zFq^*9Gn)yTG4b}R{!+3hWQ+u3GT~8nwl2S1lpw`s0X_qpxv)g+JIkVKl${sYf_nV~B>Em>M;RlqGb5WVil(89 zs=ld@|#;dq1*vQGz=7--Br-|l) zZ%Xh@v8>B7P?~}?Cg$q9_={59l%m~O&*a6TKsCMAzG&vD>k2WDzJ6!tc!V)+oxF;h zJH;apM=wO?r_+*#;ulohuP=E>^zon}a$NnlcQ{1$SO*i=jnGVcQa^>QOILc)e6;eNTI>os=eaJ{*^DE+~jc zS}TYeOykDmJ=6O%>m`i*>&pO_S;qMySJIyP=}4E&J%#1zju$RpVAkZbEl+p%?ZP^C z*$$2b4t%a(e+%>a>d_f_<JjxI#J1x;=hPd1zFPx=6T$;;X1TD*2(edZ3f46zaAoW>L53vS_J*N8TMB|n+;LD| zC=GkQPpyDY#Am4l49chDv*gojhRj_?63&&8#doW`INATAo(qY#{q}%nf@eTIXmtU< zdB<7YWfyCmBs|c)cK>1)v&M#!yNj#4d$~pVfDWQc_ke1?fw{T1Nce_b`v|Vp5ig(H zJvRD^+ps46^hLX;=e2!2e;w9y1D@!D$c@Jc&%%%IL=+xzw55&2?darw=9g~>P z9>?Kdc$r?6c$m%x2S$sdpPl>GQZ{rC9mPS63*qjCVa?OIBj!fW zm|g?>CVfGXNjOfcyqImXR_(tXS(F{FcoNzKvG5R$IgGaxC@)i(e+$ME}vPVIhd|mx2IIE+f zM?9opQHIVgBWu)^A|RzXw!^??S!x)SZOwZaJkGjc<_}2l^eSBm!eAJG9T>EC6I_sy z?bxzDIAn&K5*mX)$RQzDA?s)-no-XF(g*yl4%+GBf`##bDXJ==AQk*xmnatI;SsLp zP9XTHq5mmS=iWu~9ES>b%Q=1aMa|ya^vj$@qz9S!ih{T8_PD%Sf_QrNKwgrXw9ldm zHRVR98*{C?_XNpJn{abA!oix_mowRMu^2lV-LPi;0+?-F(>^5#OHX-fPED zCu^l7u3E%STI}c4{J2!)9SUlGP_@!d?5W^QJXOI-Ea`hFMKjR7TluLvzC-ozCPn1`Tpy z!vlv@_Z58ILX6>nDjTp-1LlFMx~-%GA`aJvG$?8*Ihn;mH37eK**rmOEwqegf-Ccx zrIX4;{c~RK>XuTXxYo5kMiWMy)!IC{*DHG@E$hx?RwP@+wuad(P1{@%tRkyJRqD)3 zMHHHZ4boqDn>-=DgR5VlhQTpfVy182Gk;A_S8A1-;U1RR>+$62>(MUx@Nox$vTjHq z%QR=j!6Gdyb5wu7y(YUktwMuW5<@jl?m4cv4BODiT5o8qVdC0MBqGr@-YBIwnpZAY znX9(_uQjP}JJ=!~Ve9#5I~rUnN|P_3D$LqZcvBnywYhjlMSFHm`;u9GPla{5QD7(7*6Tb3Svr8;(nuAd81q$*uq6HC_&~je*Ca7hP4sJp0av{M8480wF 
zxASi7Qv+~@2U%Nu1Ud;s-G4CTVWIPyx!sg&8ZG0Wq zG_}i3C(6_1>q3w!EH7$Kwq8uBp2F2N7}l65mk1p*9v0&+;th=_E-W)E;w}P(j⁢ zv5o9#E7!G0XmdzfsS{efPNi`1b44~SZ4Z8fuX!I}#8g+(wxzQwUT#Xb2(tbY1+EUhGKoT@KEU9Ktl>_0 z%bjDJg;#*gtJZv!-Zs`?^}v5eKmnbjqlvnSzE@_SP|LG_PJ6CYU+6zY6>92%E+ z=j@TZf-iW4(%U{lnYxQA;7Q!b;^brF8n0D>)`q5>|WDDXLrqYU_tKN2>=#@~OE7grMnNh?UOz-O~6 z6%rHy{#h9K0AT+lDC7q4{hw^|q6*Ry;;L%Q@)Ga}$60_q%D)rv(CtS$CQbpq9|y1e zRSrN4;$Jyl{m5bZw`$8TGvb}(LpY{-cQ)fcyJv7l3S52TLXVDsphtv&aPuDk1OzCA z4A^QtC(!11`IsNx_HnSy?>EKpHJWT^wmS~hc^p^zIIh@9f6U@I2 zC=Mve{j2^)mS#U$e{@Q?SO6%LDsXz@SY+=cK_QMmXBIU)j!$ajc-zLx3V60EXJ!qC zi<%2x8Q24YN+&8U@CIlN zrZkcT9yh%LrlGS9`G)KdP(@9Eo-AQz@8GEFWcb7U=a0H^ZVbLmz{+&M7W(nXJ4sN8 zJLR7eeK(K8`2-}j(T7JsO`L!+CvbueT%izanm-^A1Dn{`1Nw`9P?cq;7no+XfC`K(GO9?O^5zNIt4M+M8LM0=7Gz8UA@Z0N+lg+cX)NfazRu z5D)~HA^(u%w^cz+@2@_#S|u>GpB+j4KzQ^&Wcl9f z&hG#bCA(Yk0D&t&aJE^xME^&E-&xGHhXn%}psEIj641H+Nl-}boj;)Zt*t(4wZ5DN z@GXF$bL=&pBq-#vkTkh>7hl%K5|3 z{`Vn9b$iR-SoGENp}bn4;fR3>9sA%X2@1L3aE9yTra;Wb#_`xWwLSLdfu+PAu+o3| zGVnpzPr=ch{uuoHjtw7+_!L_2;knQ!DuDl0R`|%jr+}jFzXtrHIKc323?JO{l&;VF z*L1+}JU7%QJOg|5|Tc|D8fN zJORAg=_vsy{ak|o);@)Yh8Lkcg@$FG3k@ep36BRa^>~UmnRPziS>Z=`Jb2x*Q#`%A zU*i3&Vg?TluO@X0O;r2Jl6LKLUOVhSqg1*qOt^|8*c7 zo(298@+r$k_wQNGHv{|$tW(T8L+4_`FQ{kEW5Jgg{yf7ey4ss_(SNKfz(N9lx&a;< je(UuV8hP?p&}TPdm1I$XmG#(RzlD&B2izSj9sl%y5~4qc diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index b6114cae..68e8816d 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,8 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip +distributionSha256Sum=d725d707bfabd4dfdc958c624003b3c80accc03f7037b5122c4b1d0ef15cecab +distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists 
-distributionSha256Sum=9631d53cf3e74bfa726893aee1f8994fee4e060c401335946dba2156f440f24c diff --git a/gradlew b/gradlew index 1aa94a42..f5feea6d 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -84,7 +86,8 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum diff --git a/gradlew.bat b/gradlew.bat index 6689b85b..0ebb4c6c 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,6 +13,8 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. 
@rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## From 0fbce606bf914597854ee7dcb5eaaab9a8931279 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Mon, 29 Jul 2024 14:41:15 +0800 Subject: [PATCH 106/119] Add 2.16 release note (#373) Signed-off-by: zane-neo --- release-notes/opensearch-skills.release-notes-2.16.0.0.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 release-notes/opensearch-skills.release-notes-2.16.0.0.md diff --git a/release-notes/opensearch-skills.release-notes-2.16.0.0.md b/release-notes/opensearch-skills.release-notes-2.16.0.0.md new file mode 100644 index 00000000..2a474746 --- /dev/null +++ b/release-notes/opensearch-skills.release-notes-2.16.0.0.md @@ -0,0 +1,8 @@ +# 2024-07-29 Version 2.16.0.0 + +Compatible with OpenSearch 2.16.0 + +### Features +* support nested query in neural sparse tool, vectorDB tool and RAG tool ([#350](https://github.com/opensearch-project/skills/pull/350)) +* Add cluster setting to control ppl execution ([#344](https://github.com/opensearch-project/skills/pull/344)) +* Add CreateAnomalyDetectorTool ([#348](https://github.com/opensearch-project/skills/pull/348)) From c52dbeaeaec15877103172892018f71c9a081156 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 14:00:29 +0800 Subject: [PATCH 107/119] Add includeFields parameter to the method extractFieldNamesTypes (#376) (#378) * Add includeFields parameter to the method extractFieldNamesTypes * Remove empty line --------- (cherry picked from commit 5a9dbcd7686e62d1a64d993bc074adde602421cb) Signed-off-by: gaobinlong Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../tools/CreateAnomalyDetectorTool.java | 2 +- .../org/opensearch/agent/tools/PPLTool.java | 2 +- .../agent/tools/utils/ToolHelper.java | 19 ++-- 
.../agent/tools/ToolHelperTests.java | 90 +++++++++++++++++++ 4 files changed, 106 insertions(+), 7 deletions(-) create mode 100644 src/test/java/org/opensearch/agent/tools/ToolHelperTests.java diff --git a/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java b/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java index 9014c907..52811e61 100644 --- a/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java +++ b/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java @@ -199,7 +199,7 @@ public void run(Map parameters, ActionListener listener) // flatten all the fields in the mapping Map fieldsToType = new HashMap<>(); - ToolHelper.extractFieldNamesTypes(mappingSource, fieldsToType, ""); + ToolHelper.extractFieldNamesTypes(mappingSource, fieldsToType, "", true); // find all date type fields from the mapping final Set dateFields = findDateTypeFields(fieldsToType); diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index a7cca56e..8a8d09f7 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -397,7 +397,7 @@ private String constructTableInfo(SearchHit[] searchHits, Map fieldsToType = new HashMap<>(); - ToolHelper.extractFieldNamesTypes(mappingSource, fieldsToType, ""); + ToolHelper.extractFieldNamesTypes(mappingSource, fieldsToType, "", false); StringJoiner tableInfoJoiner = new StringJoiner("\n"); List sortedKeys = new ArrayList<>(fieldsToType.keySet()); Collections.sort(sortedKeys); diff --git a/src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java b/src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java index 34c80135..d7f6c3f5 100644 --- a/src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java +++ b/src/main/java/org/opensearch/agent/tools/utils/ToolHelper.java @@ -13,8 +13,15 @@ public class ToolHelper { * @param 
mappingSource the mappings of an index * @param fieldsToType the result containing the field to fieldType mapping * @param prefix the parent field path + * @param includeFields whether include the `fields` in a text type field, for some use case like PPLTool, `fields` in a text type field + * cannot be included, but for CreateAnomalyDetectorTool, `fields` must be included. */ - public static void extractFieldNamesTypes(Map mappingSource, Map fieldsToType, String prefix) { + public static void extractFieldNamesTypes( + Map mappingSource, + Map fieldsToType, + String prefix, + boolean includeFields + ) { if (prefix.length() > 0) { prefix += "."; } @@ -26,15 +33,17 @@ public static void extractFieldNamesTypes(Map mappingSource, Map if (v instanceof Map) { Map vMap = (Map) v; if (vMap.containsKey("type")) { - if (!((vMap.getOrDefault("type", "")).equals("alias"))) { + String fieldType = (String) vMap.getOrDefault("type", ""); + // no need to extract alias into the result, and for object field, extract the subfields only + if (!fieldType.equals("alias") && !fieldType.equals("object")) { fieldsToType.put(prefix + n, (String) vMap.get("type")); } } if (vMap.containsKey("properties")) { - extractFieldNamesTypes((Map) vMap.get("properties"), fieldsToType, prefix + n); + extractFieldNamesTypes((Map) vMap.get("properties"), fieldsToType, prefix + n, includeFields); } - if (vMap.containsKey("fields")) { - extractFieldNamesTypes((Map) vMap.get("fields"), fieldsToType, prefix + n); + if (includeFields && vMap.containsKey("fields")) { + extractFieldNamesTypes((Map) vMap.get("fields"), fieldsToType, prefix + n, true); } } } diff --git a/src/test/java/org/opensearch/agent/tools/ToolHelperTests.java b/src/test/java/org/opensearch/agent/tools/ToolHelperTests.java new file mode 100644 index 00000000..5b6dfa7f --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/ToolHelperTests.java @@ -0,0 +1,90 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: 
Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Test; +import org.opensearch.agent.tools.utils.ToolHelper; + +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public class ToolHelperTests { + @Test + public void TestExtractFieldNamesTypes() { + Map indexMappings = Map + .of( + "response", + Map.of("type", "integer"), + "responseLatency", + Map.of("type", "float"), + "date", + Map.of("type", "date"), + "objectA", + Map.of("type", "object", "properties", Map.of("subA", Map.of("type", "keyword"))), + "objectB", + Map.of("properties", Map.of("subB", Map.of("type", "keyword"))), + "textC", + Map.of("type", "text", "fields", Map.of("subC", Map.of("type", "keyword"))), + "aliasD", + Map.of("type", "alias", "path", "date") + ); + Map result = new HashMap<>(); + ToolHelper.extractFieldNamesTypes(indexMappings, result, "", true); + assertMapEquals( + result, + Map + .of( + "response", + "integer", + "responseLatency", + "float", + "date", + "date", + "objectA.subA", + "keyword", + "objectB.subB", + "keyword", + "textC", + "text", + "textC.subC", + "keyword" + ) + ); + + Map result1 = new HashMap<>(); + ToolHelper.extractFieldNamesTypes(indexMappings, result1, "", false); + assertMapEquals( + result1, + Map + .of( + "response", + "integer", + "responseLatency", + "float", + "date", + "date", + "objectA.subA", + "keyword", + "objectB.subB", + "keyword", + "textC", + "text" + ) + ); + } + + private void assertMapEquals(Map expected, Map actual) { + assertEquals(expected.size(), actual.size()); + for (Map.Entry entry : expected.entrySet()) { + assertEquals(entry.getValue(), actual.get(entry.getKey())); + } + } +} From fc9ae9399b835d70a6d9f6ec1ebf8394abbcafff Mon Sep 17 00:00:00 2001 From: zane-neo Date: Sat, 3 Aug 2024 10:07:48 +0800 Subject: [PATCH 108/119] [Backport] Remove ppl tool execution setting (#383) * Remove ppl tool execution setting 
Signed-off-by: zane-neo * fix failure UTs Signed-off-by: zane-neo * backport 381 to 2.x Signed-off-by: zane-neo --------- Signed-off-by: zane-neo --- .../java/org/opensearch/agent/ToolPlugin.java | 12 +------ .../agent/common/SkillSettings.java | 22 ------------ .../org/opensearch/agent/tools/PPLTool.java | 23 ++---------- .../tools/utils/ClusterSettingHelper.java | 35 ------------------- .../opensearch/agent/tools/PPLToolTests.java | 35 +------------------ .../integTest/BaseAgentToolsIT.java | 1 - .../org/opensearch/integTest/PPLToolIT.java | 8 ----- 7 files changed, 5 insertions(+), 131 deletions(-) delete mode 100644 src/main/java/org/opensearch/agent/common/SkillSettings.java delete mode 100644 src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index 74ff6bf4..ac0aa484 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -10,7 +10,6 @@ import java.util.List; import java.util.function.Supplier; -import org.opensearch.agent.common.SkillSettings; import org.opensearch.agent.tools.CreateAnomalyDetectorTool; import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; @@ -20,12 +19,9 @@ import org.opensearch.agent.tools.SearchAnomalyResultsTool; import org.opensearch.agent.tools.SearchMonitorsTool; import org.opensearch.agent.tools.VectorDBTool; -import org.opensearch.agent.tools.utils.ClusterSettingHelper; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.Setting; -import org.opensearch.common.settings.Settings; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; 
@@ -64,9 +60,7 @@ public Collection createComponents( this.client = client; this.clusterService = clusterService; this.xContentRegistry = xContentRegistry; - Settings settings = environment.settings(); - ClusterSettingHelper clusterSettingHelper = new ClusterSettingHelper(settings, clusterService); - PPLTool.Factory.getInstance().init(client, clusterSettingHelper); + PPLTool.Factory.getInstance().init(client); NeuralSparseSearchTool.Factory.getInstance().init(client, xContentRegistry); VectorDBTool.Factory.getInstance().init(client, xContentRegistry); RAGTool.Factory.getInstance().init(client, xContentRegistry); @@ -94,8 +88,4 @@ public List> getToolFactories() { ); } - @Override - public List> getSettings() { - return List.of(SkillSettings.PPL_EXECUTION_ENABLED); - } } diff --git a/src/main/java/org/opensearch/agent/common/SkillSettings.java b/src/main/java/org/opensearch/agent/common/SkillSettings.java deleted file mode 100644 index 55808748..00000000 --- a/src/main/java/org/opensearch/agent/common/SkillSettings.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.common; - -import org.opensearch.common.settings.Setting; - -/** - * Settings for skills plugin - */ -public final class SkillSettings { - - private SkillSettings() {} - - /** - * This setting controls whether PPL execution is enabled or not - */ - public static final Setting PPL_EXECUTION_ENABLED = Setting - .boolSetting("plugins.skills.ppl_execution_enabled", false, Setting.Property.NodeScope, Setting.Property.Dynamic); -} diff --git a/src/main/java/org/opensearch/agent/tools/PPLTool.java b/src/main/java/org/opensearch/agent/tools/PPLTool.java index 8a8d09f7..621426ac 100644 --- a/src/main/java/org/opensearch/agent/tools/PPLTool.java +++ b/src/main/java/org/opensearch/agent/tools/PPLTool.java @@ -31,8 +31,6 @@ import org.opensearch.action.ActionRequest; import 
org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.opensearch.action.search.SearchRequest; -import org.opensearch.agent.common.SkillSettings; -import org.opensearch.agent.tools.utils.ClusterSettingHelper; import org.opensearch.agent.tools.utils.ToolHelper; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.MappingMetadata; @@ -98,9 +96,7 @@ public class PPLTool implements Tool { private int head; - private ClusterSettingHelper clusterSettingHelper; - - private static Gson gson = new Gson(); + private static Gson gson = org.opensearch.ml.common.utils.StringUtils.gson; private static Map DEFAULT_PROMPT_DICT; @@ -153,7 +149,6 @@ public static PPLModelType from(String value) { public PPLTool( Client client, - ClusterSettingHelper clusterSettingHelper, String modelId, String contextPrompt, String pplModelType, @@ -172,7 +167,6 @@ public PPLTool( this.previousToolKey = previousToolKey; this.head = head; this.execute = execute; - this.clusterSettingHelper = clusterSettingHelper; } @SuppressWarnings("unchecked") @@ -222,14 +216,7 @@ public void run(Map parameters, ActionListener listener) ModelTensor modelTensor = modelTensors.getMlModelTensors().get(0); Map dataAsMap = (Map) modelTensor.getDataAsMap(); String ppl = parseOutput(dataAsMap.get("response"), indexName); - boolean pplExecutedEnabled = clusterSettingHelper.getClusterSettings(SkillSettings.PPL_EXECUTION_ENABLED); - if (!pplExecutedEnabled || !this.execute) { - if (!pplExecutedEnabled) { - log - .debug( - "PPL execution is disabled, the query will be returned directly, to enable this, please set plugins.skills.ppl_execution_enabled to true" - ); - } + if (!this.execute) { Map ret = ImmutableMap.of("ppl", ppl); listener.onResponse((T) AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(ret))); return; @@ -298,8 +285,6 @@ public boolean validate(Map parameters) { public static class Factory implements Tool.Factory { private Client client; - 
private ClusterSettingHelper clusterSettingHelper; - private static Factory INSTANCE; public static Factory getInstance() { @@ -315,9 +300,8 @@ public static Factory getInstance() { } } - public void init(Client client, ClusterSettingHelper clusterSettingHelper) { + public void init(Client client) { this.client = client; - this.clusterSettingHelper = clusterSettingHelper; } @Override @@ -325,7 +309,6 @@ public PPLTool create(Map map) { validatePPLToolParameters(map); return new PPLTool( client, - clusterSettingHelper, (String) map.get("model_id"), (String) map.getOrDefault("prompt", ""), (String) map.getOrDefault("model_type", ""), diff --git a/src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java b/src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java deleted file mode 100644 index 92bf9dcd..00000000 --- a/src/main/java/org/opensearch/agent/tools/utils/ClusterSettingHelper.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.tools.utils; - -import java.util.Optional; - -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.Setting; -import org.opensearch.common.settings.Settings; - -import lombok.AllArgsConstructor; - -/** - * This class is to encapsulate the {@link Settings} and {@link ClusterService} and provide a general method to retrieve dynamical cluster settings conveniently. - */ -@AllArgsConstructor -public class ClusterSettingHelper { - - private Settings settings; - - private ClusterService clusterService; - - /** - * Retrieves the cluster settings for the specified setting. 
- * - * @param setting the setting to retrieve cluster settings for - * @return the cluster setting value, or the default setting value if not found - */ - public T getClusterSettings(Setting setting) { - return Optional.ofNullable(clusterService.getClusterSettings().get(setting)).orElse(setting.get(settings)); - } -} diff --git a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java index 8e2c3aaa..ae1baa31 100644 --- a/src/test/java/org/opensearch/agent/tools/PPLToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/PPLToolTests.java @@ -9,7 +9,6 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.opensearch.ml.common.CommonValue.ML_CONNECTOR_INDEX; import static org.opensearch.ml.common.utils.StringUtils.gson; @@ -17,7 +16,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.Set; import org.apache.lucene.search.TotalHits; import org.junit.Before; @@ -26,15 +24,10 @@ import org.mockito.MockitoAnnotations; import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.opensearch.action.search.SearchResponse; -import org.opensearch.agent.common.SkillSettings; -import org.opensearch.agent.tools.utils.ClusterSettingHelper; import org.opensearch.client.AdminClient; import org.opensearch.client.Client; import org.opensearch.client.IndicesAdminClient; import org.opensearch.cluster.metadata.MappingMetadata; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.settings.ClusterSettings; -import org.opensearch.common.settings.Settings; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; @@ -128,13 +121,7 @@ 
public void setup() { listener.onResponse(transportPPLQueryResponse); return null; }).when(client).execute(eq(PPLQueryAction.INSTANCE), any(), any()); - - Settings settings = Settings.builder().put(SkillSettings.PPL_EXECUTION_ENABLED.getKey(), true).build(); - ClusterService clusterService = mock(ClusterService.class); - when(clusterService.getSettings()).thenReturn(settings); - when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings, Set.of(SkillSettings.PPL_EXECUTION_ENABLED))); - ClusterSettingHelper clusterSettingHelper = new ClusterSettingHelper(settings, clusterService); - PPLTool.Factory.getInstance().init(client, clusterSettingHelper); + PPLTool.Factory.getInstance().init(client); } @Test @@ -413,26 +400,6 @@ public void testTool_executePPLFailure() { ); } - @Test - public void test_pplTool_whenPPLExecutionDisabled_returnOnlyContainsPPL() { - Settings settings = Settings.builder().put(SkillSettings.PPL_EXECUTION_ENABLED.getKey(), false).build(); - ClusterService clusterService = mock(ClusterService.class); - when(clusterService.getSettings()).thenReturn(settings); - when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings, Set.of(SkillSettings.PPL_EXECUTION_ENABLED))); - ClusterSettingHelper clusterSettingHelper = new ClusterSettingHelper(settings, clusterService); - PPLTool.Factory.getInstance().init(client, clusterSettingHelper); - PPLTool tool = PPLTool.Factory - .getInstance() - .create(ImmutableMap.of("model_id", "modelId", "prompt", "contextPrompt", "head", "100")); - assertEquals(PPLTool.TYPE, tool.getName()); - - tool.run(ImmutableMap.of("index", "demo", "question", "demo"), ActionListener.wrap(executePPLResult -> { - Map returnResults = gson.fromJson(executePPLResult, Map.class); - assertNull(returnResults.get("executionResult")); - assertEquals("source=demo| head 1", returnResults.get("ppl")); - }, log::error)); - } - private void createMappings() { indexMappings = new HashMap<>(); indexMappings 
diff --git a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java index 853a2974..658a3fc7 100644 --- a/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java +++ b/src/test/java/org/opensearch/integTest/BaseAgentToolsIT.java @@ -63,7 +63,6 @@ public void updateClusterSettings() { updateClusterSettings("plugins.ml_commons.jvm_heap_memory_threshold", 100); updateClusterSettings("plugins.ml_commons.allow_registering_model_via_url", true); updateClusterSettings("plugins.ml_commons.agent_framework_enabled", true); - updateClusterSettings("plugins.skills.ppl_execution_enabled", true); } @SneakyThrows diff --git a/src/test/java/org/opensearch/integTest/PPLToolIT.java b/src/test/java/org/opensearch/integTest/PPLToolIT.java index b208e1f2..46cbd864 100644 --- a/src/test/java/org/opensearch/integTest/PPLToolIT.java +++ b/src/test/java/org/opensearch/integTest/PPLToolIT.java @@ -58,14 +58,6 @@ public void testPPLTool() { ); } - public void test_PPLTool_whenPPLExecutionDisabled_ResultOnlyContainsPPL() { - updateClusterSettings("plugins.skills.ppl_execution_enabled", false); - prepareIndex(); - String agentId = registerAgent(); - String result = executeAgent(agentId, "{\"parameters\": {\"question\": \"correct\", \"index\": \"employee\"}}"); - assertEquals("{\"ppl\":\"source\\u003demployee| where age \\u003e 56 | stats COUNT() as cnt\"}", result); - } - public void testPPLTool_withWrongPPLGenerated_thenThrowException() { prepareIndex(); String agentId = registerAgent(); From fc23773ea54b8ce6bb3c303a08cfb83c2f6d4b80 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Wed, 28 Aug 2024 15:42:54 +0800 Subject: [PATCH 109/119] Fix 2.17 compilation error since AD change (#388) * Fix 2.17 compilation error since AD change Signed-off-by: zane-neo * Fix 2.17 compilation error since AD change Signed-off-by: zane-neo --------- Signed-off-by: zane-neo --- build.gradle | 2 +- 
.../org/opensearch/agent/tools/CreateAnomalyDetectorTool.java | 2 +- .../opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 4708277f..b07d4029 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.16.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.17.0-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") version_tokens = opensearch_version.tokenize('-') diff --git a/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java b/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java index 52811e61..bd018698 100644 --- a/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java +++ b/src/main/java/org/opensearch/agent/tools/CreateAnomalyDetectorTool.java @@ -31,6 +31,7 @@ import org.opensearch.client.Client; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.Strings; import org.opensearch.ml.common.FunctionName; import org.opensearch.ml.common.dataset.remote.RemoteInferenceInputDataSet; import org.opensearch.ml.common.input.MLInput; @@ -44,7 +45,6 @@ import com.google.common.collect.ImmutableMap; -import joptsimple.internal.Strings; import lombok.Getter; import lombok.Setter; import lombok.extern.log4j.Log4j2; diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index e0b04336..f7ab651e 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ 
b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -93,6 +93,7 @@ public void setup() { null, null, null, + null, null ); } From 10b7f8bf26ef29d8dbaa664ab612c12105ec52ed Mon Sep 17 00:00:00 2001 From: zane-neo Date: Wed, 28 Aug 2024 15:48:20 +0800 Subject: [PATCH 110/119] backport #256 to 2.x (#367) Signed-off-by: zane-neo --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index b07d4029..84aef578 100644 --- a/build.gradle +++ b/build.gradle @@ -115,7 +115,7 @@ task addJarsToClasspath(type: Copy) { dependencies { // 3P dependencies compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.10.1' - compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.22.0" + compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.23.1" compileOnly group: 'org.json', name: 'json', version: '20240205' compileOnly("com.google.guava:guava:33.0.0-jre") compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.14.0' From 69aa50e23bb0e23a69a71938d78ec93ede820372 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Wed, 28 Aug 2024 15:48:29 +0800 Subject: [PATCH 111/119] backport #258 to 2.x (#370) Signed-off-by: zane-neo --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 84aef578..bc301d28 100644 --- a/build.gradle +++ b/build.gradle @@ -87,7 +87,7 @@ configurations { all { resolutionStrategy { force "org.mockito:mockito-core:${versions.mockito}" - force "com.google.guava:guava:33.0.0-jre" // CVE for 31.1 + force "com.google.guava:guava:33.2.1-jre" // CVE for 31.1 force("org.eclipse.platform:org.eclipse.core.runtime:3.30.0") // CVE for < 3.29.0, forces JDK17 for spotless } } @@ -117,7 +117,7 @@ dependencies { compileOnly group: 'com.google.code.gson', name: 'gson', version: '2.10.1' compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.23.1" compileOnly group: 'org.json', name: 'json', version: 
'20240205' - compileOnly("com.google.guava:guava:33.0.0-jre") + compileOnly("com.google.guava:guava:33.2.1-jre") compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.14.0' compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.11.0' From 3cbb1f4d1e1028208e2321d8431c4121804223f6 Mon Sep 17 00:00:00 2001 From: zane-neo Date: Wed, 28 Aug 2024 16:17:37 +0800 Subject: [PATCH 112/119] Upgrade apache common lang version to 3.16 (#390) Signed-off-by: zane-neo --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index bc301d28..9189e4db 100644 --- a/build.gradle +++ b/build.gradle @@ -118,7 +118,7 @@ dependencies { compileOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.23.1" compileOnly group: 'org.json', name: 'json', version: '20240205' compileOnly("com.google.guava:guava:33.2.1-jre") - compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.14.0' + compileOnly group: 'org.apache.commons', name: 'commons-lang3', version: '3.16.0' compileOnly group: 'org.apache.commons', name: 'commons-text', version: '1.11.0' // Plugin dependencies From 6b399d084e6b6e775e65b1897df2ffcd742adf58 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 13:01:50 +0800 Subject: [PATCH 113/119] chore(deps): update dependency gradle to v8.10 (#389) (#392) (cherry picked from commit 1d43c5d08231e65ab4e1f91aa26f5484cbe49d67) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- gradle/wrapper/gradle-wrapper.jar | Bin 43504 -> 43583 bytes gradle/wrapper/gradle-wrapper.properties | 4 ++-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 
2c3521197d7c4586c843d1d3e9090525f1898cde..a4b76b9530d66f5e68d973ea569d8e19de379189 100644 GIT binary patch delta 3990 zcmV;H4{7l5(*nQL0Kr1kzC=_KMxQY0|W5(lc#i zH*M1^P4B}|{x<+fkObwl)u#`$GxKKV&3pg*-y6R6txw)0qU|Clf9Uds3x{_-**c=7 z&*)~RHPM>Rw#Hi1R({;bX|7?J@w}DMF>dQQU2}9yj%iLjJ*KD6IEB2^n#gK7M~}6R zkH+)bc--JU^pV~7W=3{E*4|ZFpDpBa7;wh4_%;?XM-5ZgZNnVJ=vm!%a2CdQb?oTa z70>8rTb~M$5Tp!Se+4_OKWOB1LF+7gv~$$fGC95ToUM(I>vrd$>9|@h=O?eARj0MH zT4zo(M>`LWoYvE>pXvqG=d96D-4?VySz~=tPVNyD$XMshoTX(1ZLB5OU!I2OI{kb) zS8$B8Qm>wLT6diNnyJZC?yp{Kn67S{TCOt-!OonOK7$K)e-13U9GlnQXPAb&SJ0#3 z+vs~+4Qovv(%i8g$I#FCpCG^C4DdyQw3phJ(f#y*pvNDQCRZ~MvW<}fUs~PL=4??j zmhPyg<*I4RbTz|NHFE-DC7lf2=}-sGkE5e!RM%3ohM7_I^IF=?O{m*uUPH(V?gqyc(Rp?-Qu(3bBIL4Fz(v?=_Sh?LbK{nqZMD>#9D_hNhaV$0ef3@9V90|0u#|PUNTO>$F=qRhg1duaE z0`v~X3G{8RVT@kOa-pU+z8{JWyP6GF*u2e8eKr7a2t1fuqQy)@d|Qn(%YLZ62TWtoX@$nL}9?atE#Yw`rd(>cr0gY;dT9~^oL;u)zgHUvxc2I*b&ZkGM-iq=&(?kyO(3}=P! zRp=rErEyMT5UE9GjPHZ#T<`cnD)jyIL!8P{H@IU#`e8cAG5jMK zVyKw7--dAC;?-qEu*rMr$5@y535qZ6p(R#+fLA_)G~!wnT~~)|s`}&fA(s6xXN`9j zP#Fd3GBa#HeS{5&8p?%DKUyN^X9cYUc6vq}D_3xJ&d@=6j(6BZKPl?!k1?!`f3z&a zR4ZF60Mx7oBxLSxGuzA*Dy5n-d2K=+)6VMZh_0KetK|{e;E{8NJJ!)=_E~1uu=A=r zrn&gh)h*SFhsQJo!f+wKMIE;-EOaMSMB@aXRU(UcnJhZW^B^mgs|M9@5WF@s6B0p& zm#CTz)yiQCgURE{%hjxHcJ6G&>G9i`7MyftL!QQd5 z@RflRs?7)99?X`kHNt>W3l7YqscBpi*R2+fsgABor>KVOu(i(`03aytf2UA!&SC9v z!E}whj#^9~=XHMinFZ;6UOJjo=mmNaWkv~nC=qH9$s-8roGeyaW-E~SzZ3Gg>j zZ8}<320rg4=$`M0nxN!w(PtHUjeeU?MvYgWKZ6kkzABK;vMN0|U;X9abJleJA(xy<}5h5P(5 z{RzAFPvMnX2m0yH0Jn2Uo-p`daE|(O`YQiC#jB8;6bVIUf?SY(k$#C0`d6qT`>Xe0+0}Oj0=F&*D;PVe=Z<=0AGI<6$gYLwa#r` zm449x*fU;_+J>Mz!wa;T-wldoBB%&OEMJgtm#oaI60TSYCy7;+$5?q!zi5K`u66Wq zvg)Fx$s`V3Em{=OEY{3lmh_7|08ykS&U9w!kp@Ctuzqe1JFOGz6%i5}Kmm9>^=gih z?kRxqLA<3@e=}G4R_?phW{4DVr?`tPfyZSN@R=^;P;?!2bh~F1I|fB7P=V=9a6XU5 z<#0f>RS0O&rhc&nTRFOW7&QhevP0#>j0eq<1@D5yAlgMl5n&O9X|Vq}%RX}iNyRFF z7sX&u#6?E~bm~N|z&YikXC=I0E*8Z$v7PtWfjy)$e_Ez25fnR1Q=q1`;U!~U>|&YS zaOS8y!^ORmr2L4ik!IYR8@Dcx8MTC=(b4P6iE5CnrbI~7j7DmM8em$!da&D!6Xu)!vKPdLG 
z9f#)se|6=5yOCe)N6xDhPI!m81*dNe7u985zi%IVfOfJh69+#ag4ELzGne?o`eA`42K4T)h3S+s)5IT97%O>du- z0U54L8m4}rkRQ?QBfJ%DLssy^+a7Ajw;0&`NOTY4o;0-ivm9 zBz1C%nr_hQ)X)^QM6T1?=yeLkuG9Lf50(eH}`tFye;01&(p?8i+6h};VV-2B~qdxeC#=X z(JLlzy&fHkyi9Ksbcs~&r^%lh^2COldLz^H@X!s~mr9Dr6z!j+4?zkD@Ls7F8(t(f z9`U?P$Lmn*Y{K}aR4N&1N=?xtQ1%jqf1~pJyQ4SgBrEtR`j4lQuh7cqP49Em5cO=I zB(He2`iPN5M=Y0}h(IU$37ANTGx&|b-u1BYA*#dE(L-lptoOpo&th~E)_)y-`6kSH z3vvyVrcBwW^_XYReJ=JYd9OBQrzv;f2AQdZH#$Y{Y+Oa33M70XFI((fs;mB4e`<<{ ze4dv2B0V_?Ytsi>>g%qs*}oDGd5d(RNZ*6?7qNbdp7wP4T72=F&r?Ud#kZr8Ze5tB z_oNb7{G+(o2ajL$!69FW@jjPQ2a5C)m!MKKRirC$_VYIuVQCpf9rIms0GRDf)8AH${I`q^~5rjot@#3$2#zT2f`(N^P7Z;6(@EK$q*Jgif00I6*^ZGV+XB5uw*1R-@23yTw&WKD{s1;HTL;dO)%5i#`dc6b7;5@^{KU%N|A-$zsYw4)7LA{3`Zp>1 z-?K9_IE&z)dayUM)wd8K^29m-l$lFhi$zj0l!u~4;VGR6Y!?MAfBC^?QD53hy6VdD z@eUZIui}~L%#SmajaRq1J|#> z4m=o$vZ*34=ZWK2!QMNEcp2Lbc5N1q!lEDq(bz0b;WI9;e>l=CG9^n#ro`w>_0F$Q zfZ={2QyTkfByC&gy;x!r*NyXXbk=a%~~(#K?< zTke0HuF5{Q+~?@!KDXR|g+43$+;ab`^flS%miup_0OUTm=nIc%d5nLP)i308PIjl_YMF6cpQ__6&$n6it8K- z8PIjl_YMF6cpQ_!r)L8IivW`WdK8mBs6PXdjR2DYdK8nCs73=4j{uVadK8oNjwX|E wpAeHLsTu^*Y>Trk?aBtSQ(D-o$(D8Px^?ZI-PUB? 
z*1fv!{YdHme3Fc8%cR@*@zc5A_nq&2=R47Hp@$-JF4Fz*;SLw5}K^y>s-s;V!}b2i=5=M- zComP?ju>8Fe@=H@rlwe1l`J*6BTTo`9b$zjQ@HxrAhp0D#u?M~TxGC_!?ccCHCjt| zF*PgJf@kJB`|Ml}cmsyrAjO#Kjr^E5p29w+#>$C`Q|54BoDv$fQ9D?3n32P9LPMIzu?LjNqggOH=1@T{9bMn*u8(GI z!;MLTtFPHal^S>VcJdiYqX0VU|Rn@A}C1xOlxCribxes0~+n2 z6qDaIA2$?e`opx3_KW!rAgbpzU)gFdjAKXh|5w``#F0R|c)Y)Du0_Ihhz^S?k^pk% zP>9|pIDx)xHH^_~+aA=^$M!<8K~Hy(71nJGf6`HnjtS=4X4=Hk^O71oNia2V{HUCC zoN3RSBS?mZCLw;l4W4a+D8qc)XJS`pUJ5X-f^1ytxwr`@si$lAE?{4G|o; zO0l>`rr?;~c;{ZEFJ!!3=7=FdGJ?Q^xfNQh4A?i;IJ4}B+A?4olTK(fN++3CRBP97 ze~lG9h%oegkn)lpW-4F8o2`*WW0mZHwHez`ko@>U1_;EC_6ig|Drn@=DMV9YEUSCa zIf$kHei3(u#zm9I!Jf(4t`Vm1lltJ&lVHy(eIXE8sy9sUpmz%I_gA#8x^Zv8%w?r2 z{GdkX1SkzRIr>prRK@rqn9j2wG|rUvf6PJbbin=yy-TAXrguvzN8jL$hUrIXzr^s5 zVM?H4;eM-QeRFr06@ifV(ocvk?_)~N@1c2ien56UjWXid6W%6ievIh)>dk|rIs##^kY67ib8Kw%#-oVFaXG7$ERyA9(NSJUvWiOA5H(!{uOpcW zg&-?iqPhds%3%tFspHDqqr;A!e@B#iPQjHd=c>N1LoOEGRehVoPOdxJ>b6>yc#o#+ zl8s8!(|NMeqjsy@0x{8^j0d00SqRZjp{Kj)&4UHYGxG+z9b-)72I*&J70?+8e?p_@ z=>-(>l6z5vYlP~<2%DU02b!mA{7mS)NS_eLe=t)sm&+Pmk?asOEKlkPQ)EUvvfC=;4M&*|I!w}(@V_)eUKLA_t^%`o z0PM9LV|UKTLnk|?M3u!|f2S0?UqZsEIH9*NJS-8lzu;A6-rr-ot=dg9SASoluZUkFH$7X; zP=?kYX!K?JL-b~<#7wU;b;eS)O;@?h%sPPk{4xEBxb{!sm0AY|f9cNvx6>$3F!*0c z75H=dy8JvTyO8}g1w{$9T$p~5en}AeSLoCF>_RT9YPMpChUjl310o*$QocjbH& zbnwg#gssR#jDVN{uEi3n(PZ%PFZ|6J2 z5_rBf0-u>e4sFe0*Km49ATi7>Kn0f9!uc|rRMR1Dtt6m1LW8^>qFlo}h$@br=Rmpi z;mI&>OF64Be{dVeHI8utrh)v^wsZ0jii%x8UgZ8TC%K~@I(4E};GFW&(;WVov}3%H zH;IhRkfD^(vt^DjZz(MyHLZxv8}qzPc(%itBkBwf_fC~sDBgh<3XAv5cxxfF3<2U! 
z03Xe&z`is!JDHbe;mNmfkH+_LFE*I2^mdL@7(@9DfAcP6O04V-ko;Rpgp<%Cj5r8Z zd0`sXoIjV$j)--;jA6Zy^D5&5v$o^>e%>Q?9GLm{i~p^lAn!%ZtF$I~>39XVZxk0b zROh^Bk9cE0AJBLozZIEmy7xG(yHWGztvfnr0(2ro1%>zsGMS^EMu+S$r=_;9 zWwZkgf7Q7`H9sLf2Go^Xy6&h~a&%s2_T@_Csf19MntF$aVFiFkvE3_hUg(B@&Xw@YJ zpL$wNYf78=0c@!QU6_a$>CPiXT7QAGDM}7Z(0z#_ZA=fmLUj{2z7@Ypo71UDy8GHr z-&TLKf6a5WCf@Adle3VglBt4>Z>;xF}}-S~B7<(%B;Y z0QR55{z-buw>8ilNM3u6I+D$S%?)(p>=eBx-HpvZj{7c*_?K=d()*7q?93us}1dq%FAFYLsW8ZTQ_XZLh`P2*6(NgS}qGcfGXVWpwsp#Rs}IuKbk*`2}&) zI^Vsk6S&Q4@oYS?dJ`NwMVBs6f57+RxdqVub#PvMu?$=^OJy5xEl0<5SLsSRy%%a0 zi}Y#1-F3m;Ieh#Y12UgW?-R)|eX>ZuF-2cc!1>~NS|XSF-6In>zBoZg+ml!6%fk7U zw0LHcz8VQk(jOJ+Yu)|^|15ufl$KQd_1eUZZzj`aC%umU6F1&D5XVWce_wAe(qCSZ zpX-QF4e{EmEVN9~6%bR5U*UT{eMHfcUo`jw*u?4r2s_$`}U{?NjvEm(u&<>B|%mq$Q3weshxk z76<``8vh{+nX`@9CB6IE&z)I%IFjR^LH{s1p|eppv=x za(g_jLU|xjWMAn-V7th$f({|LG8zzIE0g?cyW;%Dmtv%C+0@xVxPE^ zyZzi9P%JAD6ynwHptuzP`Kox7*9h7XSMonCalv;Md0i9Vb-c*!f0ubfk?&T&T}AHh z4m8Bz{JllKcdNg?D^%a5MFQ;#1z|*}H^qHLzW)L}wp?2tY7RejtSh8<;Zw)QGJYUm z|MbTxyj*McKlStlT9I5XlSWtQGN&-LTr2XyNU+`490rg?LYLMRnz-@oKqT1hpCGqP zyRXt4=_Woj$%n5ee<3zhLF>5>`?m9a#xQH+Jk_+|RM8Vi;2*XbK- zEL6sCpaGPzP>k8f4Kh|##_imt#zJMB;ir|JrMPGW`rityK1vHXMLy18%qmMQAm4WZ zP)i30KR&5vs15)C+8dM66&$k~i|ZT;KR&5vs15)C+8dJ(sAmGPijyIz6_bsqKLSFH zlOd=TljEpH0>h4zA*dCTK&emy#FCRCs1=i^sZ9bFmXjf<6_X39E(XY)00000#N437 diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 68e8816d..2b189974 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=d725d707bfabd4dfdc958c624003b3c80accc03f7037b5122c4b1d0ef15cecab -distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip +distributionSha256Sum=5b9c5eb3f9fc2c94abaea57d90bd78747ca117ddbbf96c859d3741181a12bf2a +distributionUrl=https\://services.gradle.org/distributions/gradle-8.10-bin.zip networkTimeout=10000 
validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME From 76d65e3fd99ef702eebad0d3b4a9b4b76d10a109 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Thu, 29 Aug 2024 13:31:43 +0800 Subject: [PATCH 114/119] chore(deps): update plugin io.freefair.lombok to v8.10 (#393) (#394) (cherry picked from commit 9989ac6ccaf673f0e8363f1b7486e13127e74d11) Signed-off-by: mend-for-github-com[bot] Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] Co-authored-by: mend-for-github-com[bot] <50673670+mend-for-github-com[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 9189e4db..6c160c8a 100644 --- a/build.gradle +++ b/build.gradle @@ -43,7 +43,7 @@ buildscript { plugins { id 'java-library' id 'com.diffplug.spotless' version '6.25.0' - id "io.freefair.lombok" version "8.6" + id "io.freefair.lombok" version "8.10" id "de.undercouch.download" version "5.6.0" } From 5e0fdf5bbba31a732a6c1d9bbbebf78448f6f595 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 08:10:38 +0800 Subject: [PATCH 115/119] Add 2.17 release note (#401) (#402) * Add 2.17 release note * change dependencies to maintenance --------- (cherry picked from commit d4051e56df0315a52f0784fb932c9b75d93190ef) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../opensearch-skills.release-notes-2.17.0.0.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 release-notes/opensearch-skills.release-notes-2.17.0.0.md diff --git a/release-notes/opensearch-skills.release-notes-2.17.0.0.md b/release-notes/opensearch-skills.release-notes-2.17.0.0.md new file mode 100644 index 00000000..46199f69 --- /dev/null +++ b/release-notes/opensearch-skills.release-notes-2.17.0.0.md @@ 
-0,0 +1,12 @@ +# 2024-09-07 Version 2.17.0.0 + +Compatible with OpenSearch 2.17.0 + +### Maintenance +update dependency org.apache.logging.log4j:log4j-slf4j-impl to v2.23.1 ([#256](https://github.com/opensearch-project/skills/pull/256)) +update dependency com.google.guava:guava to v33.2.1-jre ([#258](https://github.com/opensearch-project/skills/pull/258)) +Upgrade apache common lang version to 3.16 ([#371](https://github.com/opensearch-project/skills/pull/371)) +update dependency gradle to v8.10 ([#389](https://github.com/opensearch-project/skills/pull/389)) +update plugin io.freefair.lombok to v8.10 ([#393](https://github.com/opensearch-project/skills/pull/393)) + + From 09dc7dad7eb200e2df3a173e129eb6e2c627ed0d Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 10 Sep 2024 09:04:23 +0800 Subject: [PATCH 116/119] Fix 2.17 test compilation issue (#406) (#407) (cherry picked from commit 8e73e33f261b62a6bdd9bcf85b48935d6ffac1d9) Signed-off-by: zane-neo Signed-off-by: github-actions[bot] Co-authored-by: github-actions[bot] --- .../opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java index f7ab651e..e0b04336 100644 --- a/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java +++ b/src/test/java/org/opensearch/agent/tools/SearchAnomalyDetectorsToolTests.java @@ -93,7 +93,6 @@ public void setup() { null, null, null, - null, null ); } From e526edea4cb168e475684242e0b261c1d57abce4 Mon Sep 17 00:00:00 2001 From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 19:33:33 -0400 Subject: [PATCH 117/119] Increment version to 2.17.1-SNAPSHOT (#409) Signed-off-by: opensearch-ci-bot Co-authored-by: 
opensearch-ci-bot --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 6c160c8a..f163c41d 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.17.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.17.1-SNAPSHOT") buildVersionQualifier = System.getProperty("build.version_qualifier", "") isSnapshot = "true" == System.getProperty("build.snapshot", "true") version_tokens = opensearch_version.tokenize('-') From 7cd2c22d0edef0eaa4a80171bd80fa22a91b7b8a Mon Sep 17 00:00:00 2001 From: qianheng Date: Thu, 10 Oct 2024 14:24:06 +0800 Subject: [PATCH 118/119] Add LogPatternTool (#413) (#418) (#422) * Add LogPatternTool (#413) * ut fix: construct update in Monitor * Add LogPatternTool * Address comments * Add function doc * Address comments * Address comments --------- (cherry picked from commit 7d726125bc9704d5a1fa01831f44c7c0d7b4aab5) * Make compatible with java11 * spotlessApply --------- Signed-off-by: Heng Qian --- .../java/org/opensearch/agent/ToolPlugin.java | 5 +- .../agent/tools/AbstractRetrieverTool.java | 5 +- .../agent/tools/LogPatternTool.java | 295 ++++++++++++++++++ .../agent/tools/LogPatternToolTests.java | 58 ++++ 4 files changed, 359 insertions(+), 4 deletions(-) create mode 100644 src/main/java/org/opensearch/agent/tools/LogPatternTool.java create mode 100644 src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index ac0aa484..41c8f5c3 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -11,6 +11,7 @@ import java.util.function.Supplier; import 
org.opensearch.agent.tools.CreateAnomalyDetectorTool; +import org.opensearch.agent.tools.LogPatternTool; import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; import org.opensearch.agent.tools.RAGTool; @@ -69,6 +70,7 @@ public Collection createComponents( SearchAnomalyResultsTool.Factory.getInstance().init(client, namedWriteableRegistry); SearchMonitorsTool.Factory.getInstance().init(client); CreateAnomalyDetectorTool.Factory.getInstance().init(client); + LogPatternTool.Factory.getInstance().init(client, xContentRegistry); return Collections.emptyList(); } @@ -84,7 +86,8 @@ public List> getToolFactories() { SearchAnomalyDetectorsTool.Factory.getInstance(), SearchAnomalyResultsTool.Factory.getInstance(), SearchMonitorsTool.Factory.getInstance(), - CreateAnomalyDetectorTool.Factory.getInstance() + CreateAnomalyDetectorTool.Factory.getInstance(), + LogPatternTool.Factory.getInstance() ); } diff --git a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java index f01dde7e..5abcd758 100644 --- a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java +++ b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java @@ -77,7 +77,7 @@ private static Map processResponse(SearchHit hit) { return docContent; } - private SearchRequest buildSearchRequest(Map parameters) throws IOException { + protected SearchRequest buildSearchRequest(Map parameters) throws IOException { String question = parameters.get(INPUT_FIELD); if (StringUtils.isBlank(question)) { throw new IllegalArgumentException("[" + INPUT_FIELD + "] is null or empty, can not process it."); @@ -89,8 +89,7 @@ private SearchRequest buildSearchRequest(Map parameters) thr searchSourceBuilder.parseXContent(queryParser); searchSourceBuilder.fetchSource(sourceFields, null); searchSourceBuilder.size(docSize); - SearchRequest searchRequest = new 
SearchRequest().source(searchSourceBuilder).indices(index); - return searchRequest; + return new SearchRequest().source(searchSourceBuilder).indices(parameters.getOrDefault(INDEX_FIELD, index)); } @Override diff --git a/src/main/java/org/opensearch/agent/tools/LogPatternTool.java b/src/main/java/org/opensearch/agent/tools/LogPatternTool.java new file mode 100644 index 00000000..99ca6446 --- /dev/null +++ b/src/main/java/org/opensearch/agent/tools/LogPatternTool.java @@ -0,0 +1,295 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.opensearch.ml.common.utils.StringUtils.gson; + +import java.security.AccessController; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.client.Client; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.logging.LoggerMessageFormat; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.ml.common.spi.tools.ToolAnnotation; +import org.opensearch.search.SearchHit; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableSet; + +import lombok.Builder; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * This tool supports generating log patterns on the input dsl and index. It's implemented by + * several steps: + * 1. Retrival [[${DOC_SIZE_FIELD}]] logs from index + * 2. 
Extract patterns for each retrieved log + * 2.1 Find Pattern Field: If users provide parameter [[${PATTERN_FIELD}]], use it as the pattern + * field; Otherwise, find the string field with the longest length on the first log. + * 2.2 Extract Pattern: If users provide parameter [[${PATTERN}]], compile it as a pattern; + * Otherwise, use [[${DEFAULT_IGNORED_CHARS}]]. It will remove all chars matching the pattern. + * 3. Group logs by their extracted patterns. + * 4. Find top N patterns with the largest sample log size. + * 5. For each found top N patterns, return [[${SAMPLE_LOG_SIZE}]] sample logs. + */ +@Log4j2 +@Getter +@Setter +@ToolAnnotation(LogPatternTool.TYPE) +public class LogPatternTool extends AbstractRetrieverTool { + public static final String TYPE = "LogPatternTool"; + + public static final String DEFAULT_DESCRIPTION = "Log Pattern Tool"; + public static final String TOP_N_PATTERN = "top_n_pattern"; + public static final String SAMPLE_LOG_SIZE = "sample_log_size"; + public static final String PATTERN_FIELD = "pattern_field"; + public static final String PATTERN = "pattern"; + public static final int LOG_PATTERN_DEFAULT_DOC_SIZE = 1000; + public static final int DEFAULT_TOP_N_PATTERN = 3; + public static final int DEFAULT_SAMPLE_LOG_SIZE = 20; + private static final ImmutableSet DEFAULT_IGNORED_CHARS = ImmutableSet + .copyOf("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789".chars().mapToObj(c -> (char) c).toArray(Character[]::new)); + + private String name = TYPE; + private int topNPattern; + private int sampleLogSize; + @EqualsAndHashCode.Exclude + private Pattern pattern; + + @Builder + public LogPatternTool( + Client client, + NamedXContentRegistry xContentRegistry, + int docSize, + int topNPattern, + int sampleLogSize, + String patternStr + ) { + super(client, xContentRegistry, null, null, docSize); + checkPositive(topNPattern, TOP_N_PATTERN); + checkPositive(sampleLogSize, SAMPLE_LOG_SIZE); + this.topNPattern = topNPattern; + 
this.sampleLogSize = sampleLogSize; + if (patternStr != null) + this.pattern = Pattern.compile(patternStr); + } + + @Override + protected String getQueryBody(String queryText) { + return queryText; + } + + @Override + public void run(Map parameters, ActionListener listener) { + int topNPattern = parameters.containsKey(TOP_N_PATTERN) ? getPositiveInteger(parameters, TOP_N_PATTERN) : this.topNPattern; + int sampleLogSize = parameters.containsKey(SAMPLE_LOG_SIZE) ? getPositiveInteger(parameters, SAMPLE_LOG_SIZE) : this.sampleLogSize; + Pattern pattern = parameters.containsKey(PATTERN) ? Pattern.compile(parameters.get(PATTERN)) : this.pattern; + + SearchRequest searchRequest; + try { + searchRequest = buildSearchRequest(parameters); + } catch (Exception e) { + log.error("Failed to build search request.", e); + listener.onFailure(e); + return; + } + + ActionListener actionListener = ActionListener.wrap(r -> { + SearchHit[] hits = r.getHits().getHits(); + + if (hits != null && hits.length > 0) { + String patternField = parameters.containsKey(PATTERN_FIELD) + ? 
parameters.get(PATTERN_FIELD) + : findLongestField(hits[0].getSourceAsMap()); + if (patternField == null) { + listener.onResponse((T) "Pattern field is not set and this index doesn't contain any string field"); + return; + } + Map>> patternGroups = new HashMap<>(); + for (SearchHit hit : hits) { + Map source = hit.getSourceAsMap(); + String patternValue = extractPattern((String) source.getOrDefault(patternField, ""), pattern); + List> group = patternGroups.computeIfAbsent(patternValue, k -> new ArrayList<>()); + group.add(source); + } + List> sortedEntries = patternGroups + .entrySet() + .stream() + .sorted(Comparator.comparingInt(entry -> -entry.getValue().size())) + .limit(topNPattern) + .map( + entry -> Map + .of( + "total count", + entry.getValue().size(), + "pattern", + entry.getKey(), + "sample logs", + entry.getValue().subList(0, Math.min(entry.getValue().size(), sampleLogSize)) + ) + ) + .collect(Collectors.toList()); + + listener + .onResponse((T) AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(sortedEntries))); + } else { + listener.onResponse((T) "Can not get any match from search result."); + } + }, e -> { + log.error("Failed to search index.", e); + listener.onFailure(e); + }); + client.search(searchRequest, actionListener); + } + + /** + * Extract a pattern from the value of a field by removing chars in the pattern. 
This function + * imitates the same logic of Observability log pattern feature here: + * parseValue + * @param rawString string value of the field to generate a pattern + * @param pattern @Nullable the specified pattern to remove, use DEFAULT_IGNORED_CHARS if null + * @return the generated pattern value + */ + @VisibleForTesting + static String extractPattern(String rawString, Pattern pattern) { + if (pattern != null) + return pattern.matcher(rawString).replaceAll(""); + char[] chars = rawString.toCharArray(); + int pos = 0; + for (int i = 0; i < chars.length; i++) { + if (!DEFAULT_IGNORED_CHARS.contains(chars[i])) { + chars[pos++] = chars[i]; + } + } + return new String(chars, 0, pos); + } + + /** + * Find the longest field in the sample log source. This function imitates the same logic of + * Observability log pattern feature here: + * setDefaultPatternsField + * @param sampleLogSource sample log source map + * @return the longest field name + */ + @VisibleForTesting + static String findLongestField(Map sampleLogSource) { + String longestField = null; + int maxLength = 0; + + for (Map.Entry entry : sampleLogSource.entrySet()) { + Object value = entry.getValue(); + if (value instanceof String) { + String stringValue = (String) value; + int length = stringValue.length(); + if (length > maxLength) { + maxLength = length; + longestField = entry.getKey(); + } + } + } + return longestField; + } + + @Override + public String getType() { + return TYPE; + } + + @Override + public boolean validate(Map parameters) { + // LogPatternTool needs to pass index and input as parameter in runtime. 
+ return super.validate(parameters) && parameters.containsKey(INDEX_FIELD) && !StringUtils.isBlank(parameters.get(INDEX_FIELD)); + } + + private static int getPositiveInteger(Map params, String paramName) { + int value = getInteger(params, paramName); + checkPositive(value, paramName); + return value; + } + + private static int getInteger(Map params, String paramName) { + int value; + try { + value = Integer.parseInt((String) params.get(paramName)); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + LoggerMessageFormat.format("Invalid value {} for parameter {}, it should be a number", params.get(paramName), paramName) + ); + } + return value; + } + + private static void checkPositive(int value, String paramName) { + if (value <= 0) { + throw new IllegalArgumentException( + LoggerMessageFormat.format("Invalid value {} for parameter {}, it should be positive", value, paramName) + ); + } + } + + public static class Factory extends AbstractRetrieverTool.Factory { + private static LogPatternTool.Factory INSTANCE; + + public static LogPatternTool.Factory getInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (LogPatternTool.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new LogPatternTool.Factory(); + return INSTANCE; + } + } + + @Override + public LogPatternTool create(Map params) { + int docSize = params.containsKey(DOC_SIZE_FIELD) ? getInteger(params, DOC_SIZE_FIELD) : LOG_PATTERN_DEFAULT_DOC_SIZE; + int topNPattern = params.containsKey(TOP_N_PATTERN) ? getInteger(params, TOP_N_PATTERN) : DEFAULT_TOP_N_PATTERN; + int sampleLogSize = params.containsKey(SAMPLE_LOG_SIZE) ? getInteger(params, SAMPLE_LOG_SIZE) : DEFAULT_SAMPLE_LOG_SIZE; + String patternStr = params.containsKey(PATTERN) ? 
(String) params.get(PATTERN) : null; + return LogPatternTool + .builder() + .client(client) + .xContentRegistry(xContentRegistry) + .docSize(docSize) + .topNPattern(topNPattern) + .sampleLogSize(sampleLogSize) + .patternStr(patternStr) + .build(); + } + + @Override + public String getDefaultType() { + return TYPE; + } + + @Override + public String getDefaultVersion() { + return null; + } + + @Override + public String getDefaultDescription() { + return DEFAULT_DESCRIPTION; + } + } +} diff --git a/src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java b/src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java new file mode 100644 index 00000000..21731005 --- /dev/null +++ b/src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.agent.tools; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Pattern; + +import org.junit.Before; +import org.junit.Test; + +import lombok.SneakyThrows; + +public class LogPatternToolTests { + + public static final String TEST_QUERY_TEXT = "123fsd23134sdfouh"; + private Map params = new HashMap<>(); + + @Before + public void setup() {} + + @Test + @SneakyThrows + public void testCreateTool() { + LogPatternTool tool = LogPatternTool.Factory.getInstance().create(params); + assertEquals(LogPatternTool.LOG_PATTERN_DEFAULT_DOC_SIZE, (int) tool.docSize); + assertEquals(LogPatternTool.DEFAULT_TOP_N_PATTERN, tool.getTopNPattern()); + assertEquals(LogPatternTool.DEFAULT_SAMPLE_LOG_SIZE, tool.getSampleLogSize()); + assertNull(tool.getPattern()); + assertEquals("LogPatternTool", tool.getType()); + assertEquals("LogPatternTool", tool.getName()); + assertEquals(LogPatternTool.DEFAULT_DESCRIPTION, LogPatternTool.Factory.getInstance().getDefaultDescription()); + } + + @Test + public 
void testGetQueryBody() { + LogPatternTool tool = LogPatternTool.Factory.getInstance().create(params); + assertEquals(TEST_QUERY_TEXT, tool.getQueryBody(TEST_QUERY_TEXT)); + } + + @Test + public void testFindLongestField() { + assertEquals("field2", LogPatternTool.findLongestField(Map.of("field1", "123", "field2", "1234", "filed3", 1234))); + } + + @Test + public void testExtractPattern() { + assertEquals("././", LogPatternTool.extractPattern("123.abc/.AB/", null)); + assertEquals("123.c/.AB/", LogPatternTool.extractPattern("123.abc/.AB/", Pattern.compile("ab"))); + assertEquals(".abc/.AB/", LogPatternTool.extractPattern("123.abc/.AB/", Pattern.compile("[0-9]"))); + } +} From ec87ad623da7c5c61e662e6021285357acd0018e Mon Sep 17 00:00:00 2001 From: Xinyuan Lu <74362153+xinyual@users.noreply.github.com> Date: Sat, 12 Oct 2024 16:12:10 +0800 Subject: [PATCH 119/119] Revert "Add LogPatternTool (#413) (#418) (#422)" This reverts commit 7cd2c22d0edef0eaa4a80171bd80fa22a91b7b8a. --- .../java/org/opensearch/agent/ToolPlugin.java | 5 +- .../agent/tools/AbstractRetrieverTool.java | 5 +- .../agent/tools/LogPatternTool.java | 295 ------------------ .../agent/tools/LogPatternToolTests.java | 58 ---- 4 files changed, 4 insertions(+), 359 deletions(-) delete mode 100644 src/main/java/org/opensearch/agent/tools/LogPatternTool.java delete mode 100644 src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java diff --git a/src/main/java/org/opensearch/agent/ToolPlugin.java b/src/main/java/org/opensearch/agent/ToolPlugin.java index 41c8f5c3..ac0aa484 100644 --- a/src/main/java/org/opensearch/agent/ToolPlugin.java +++ b/src/main/java/org/opensearch/agent/ToolPlugin.java @@ -11,7 +11,6 @@ import java.util.function.Supplier; import org.opensearch.agent.tools.CreateAnomalyDetectorTool; -import org.opensearch.agent.tools.LogPatternTool; import org.opensearch.agent.tools.NeuralSparseSearchTool; import org.opensearch.agent.tools.PPLTool; import org.opensearch.agent.tools.RAGTool; @@ 
-70,7 +69,6 @@ public Collection createComponents( SearchAnomalyResultsTool.Factory.getInstance().init(client, namedWriteableRegistry); SearchMonitorsTool.Factory.getInstance().init(client); CreateAnomalyDetectorTool.Factory.getInstance().init(client); - LogPatternTool.Factory.getInstance().init(client, xContentRegistry); return Collections.emptyList(); } @@ -86,8 +84,7 @@ public List> getToolFactories() { SearchAnomalyDetectorsTool.Factory.getInstance(), SearchAnomalyResultsTool.Factory.getInstance(), SearchMonitorsTool.Factory.getInstance(), - CreateAnomalyDetectorTool.Factory.getInstance(), - LogPatternTool.Factory.getInstance() + CreateAnomalyDetectorTool.Factory.getInstance() ); } diff --git a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java index 5abcd758..f01dde7e 100644 --- a/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java +++ b/src/main/java/org/opensearch/agent/tools/AbstractRetrieverTool.java @@ -77,7 +77,7 @@ private static Map processResponse(SearchHit hit) { return docContent; } - protected SearchRequest buildSearchRequest(Map parameters) throws IOException { + private SearchRequest buildSearchRequest(Map parameters) throws IOException { String question = parameters.get(INPUT_FIELD); if (StringUtils.isBlank(question)) { throw new IllegalArgumentException("[" + INPUT_FIELD + "] is null or empty, can not process it."); @@ -89,7 +89,8 @@ protected SearchRequest buildSearchRequest(Map parameters) t searchSourceBuilder.parseXContent(queryParser); searchSourceBuilder.fetchSource(sourceFields, null); searchSourceBuilder.size(docSize); - return new SearchRequest().source(searchSourceBuilder).indices(parameters.getOrDefault(INDEX_FIELD, index)); + SearchRequest searchRequest = new SearchRequest().source(searchSourceBuilder).indices(index); + return searchRequest; } @Override diff --git a/src/main/java/org/opensearch/agent/tools/LogPatternTool.java 
b/src/main/java/org/opensearch/agent/tools/LogPatternTool.java deleted file mode 100644 index 99ca6446..00000000 --- a/src/main/java/org/opensearch/agent/tools/LogPatternTool.java +++ /dev/null @@ -1,295 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.tools; - -import static org.opensearch.ml.common.utils.StringUtils.gson; - -import java.security.AccessController; -import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.StringUtils; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.action.search.SearchResponse; -import org.opensearch.client.Client; -import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.logging.LoggerMessageFormat; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.ml.common.spi.tools.ToolAnnotation; -import org.opensearch.search.SearchHit; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableSet; - -import lombok.Builder; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.Setter; -import lombok.extern.log4j.Log4j2; - -/** - * This tool supports generating log patterns on the input dsl and index. It's implemented by - * several steps: - * 1. Retrival [[${DOC_SIZE_FIELD}]] logs from index - * 2. Extract patterns for each retrieved log - * 2.1 Find Pattern Field: If users provide parameter [[${PATTERN_FIELD}]], use it as the pattern - * field; Otherwise, find the string field with the longest length on the first log. - * 2.2 Extract Pattern: If users provide parameter [[${PATTERN}]], compile it as a pattern; - * Otherwise, use [[${DEFAULT_IGNORED_CHARS}]]. 
It will remove all chars matching the pattern. - * 3. Group logs by their extracted patterns. - * 4. Find top N patterns with the largest sample log size. - * 5. For each found top N patterns, return [[${SAMPLE_LOG_SIZE}]] sample logs. - */ -@Log4j2 -@Getter -@Setter -@ToolAnnotation(LogPatternTool.TYPE) -public class LogPatternTool extends AbstractRetrieverTool { - public static final String TYPE = "LogPatternTool"; - - public static final String DEFAULT_DESCRIPTION = "Log Pattern Tool"; - public static final String TOP_N_PATTERN = "top_n_pattern"; - public static final String SAMPLE_LOG_SIZE = "sample_log_size"; - public static final String PATTERN_FIELD = "pattern_field"; - public static final String PATTERN = "pattern"; - public static final int LOG_PATTERN_DEFAULT_DOC_SIZE = 1000; - public static final int DEFAULT_TOP_N_PATTERN = 3; - public static final int DEFAULT_SAMPLE_LOG_SIZE = 20; - private static final ImmutableSet DEFAULT_IGNORED_CHARS = ImmutableSet - .copyOf("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789".chars().mapToObj(c -> (char) c).toArray(Character[]::new)); - - private String name = TYPE; - private int topNPattern; - private int sampleLogSize; - @EqualsAndHashCode.Exclude - private Pattern pattern; - - @Builder - public LogPatternTool( - Client client, - NamedXContentRegistry xContentRegistry, - int docSize, - int topNPattern, - int sampleLogSize, - String patternStr - ) { - super(client, xContentRegistry, null, null, docSize); - checkPositive(topNPattern, TOP_N_PATTERN); - checkPositive(sampleLogSize, SAMPLE_LOG_SIZE); - this.topNPattern = topNPattern; - this.sampleLogSize = sampleLogSize; - if (patternStr != null) - this.pattern = Pattern.compile(patternStr); - } - - @Override - protected String getQueryBody(String queryText) { - return queryText; - } - - @Override - public void run(Map parameters, ActionListener listener) { - int topNPattern = parameters.containsKey(TOP_N_PATTERN) ? 
getPositiveInteger(parameters, TOP_N_PATTERN) : this.topNPattern; - int sampleLogSize = parameters.containsKey(SAMPLE_LOG_SIZE) ? getPositiveInteger(parameters, SAMPLE_LOG_SIZE) : this.sampleLogSize; - Pattern pattern = parameters.containsKey(PATTERN) ? Pattern.compile(parameters.get(PATTERN)) : this.pattern; - - SearchRequest searchRequest; - try { - searchRequest = buildSearchRequest(parameters); - } catch (Exception e) { - log.error("Failed to build search request.", e); - listener.onFailure(e); - return; - } - - ActionListener actionListener = ActionListener.wrap(r -> { - SearchHit[] hits = r.getHits().getHits(); - - if (hits != null && hits.length > 0) { - String patternField = parameters.containsKey(PATTERN_FIELD) - ? parameters.get(PATTERN_FIELD) - : findLongestField(hits[0].getSourceAsMap()); - if (patternField == null) { - listener.onResponse((T) "Pattern field is not set and this index doesn't contain any string field"); - return; - } - Map>> patternGroups = new HashMap<>(); - for (SearchHit hit : hits) { - Map source = hit.getSourceAsMap(); - String patternValue = extractPattern((String) source.getOrDefault(patternField, ""), pattern); - List> group = patternGroups.computeIfAbsent(patternValue, k -> new ArrayList<>()); - group.add(source); - } - List> sortedEntries = patternGroups - .entrySet() - .stream() - .sorted(Comparator.comparingInt(entry -> -entry.getValue().size())) - .limit(topNPattern) - .map( - entry -> Map - .of( - "total count", - entry.getValue().size(), - "pattern", - entry.getKey(), - "sample logs", - entry.getValue().subList(0, Math.min(entry.getValue().size(), sampleLogSize)) - ) - ) - .collect(Collectors.toList()); - - listener - .onResponse((T) AccessController.doPrivileged((PrivilegedExceptionAction) () -> gson.toJson(sortedEntries))); - } else { - listener.onResponse((T) "Can not get any match from search result."); - } - }, e -> { - log.error("Failed to search index.", e); - listener.onFailure(e); - }); - 
client.search(searchRequest, actionListener); - } - - /** - * Extract a pattern from the value of a field by removing chars in the pattern. This function - * imitates the same logic of Observability log pattern feature here: - * parseValue - * @param rawString string value of the field to generate a pattern - * @param pattern @Nullable the specified pattern to remove, use DEFAULT_IGNORED_CHARS if null - * @return the generated pattern value - */ - @VisibleForTesting - static String extractPattern(String rawString, Pattern pattern) { - if (pattern != null) - return pattern.matcher(rawString).replaceAll(""); - char[] chars = rawString.toCharArray(); - int pos = 0; - for (int i = 0; i < chars.length; i++) { - if (!DEFAULT_IGNORED_CHARS.contains(chars[i])) { - chars[pos++] = chars[i]; - } - } - return new String(chars, 0, pos); - } - - /** - * Find the longest field in the sample log source. This function imitates the same logic of - * Observability log pattern feature here: - * setDefaultPatternsField - * @param sampleLogSource sample log source map - * @return the longest field name - */ - @VisibleForTesting - static String findLongestField(Map sampleLogSource) { - String longestField = null; - int maxLength = 0; - - for (Map.Entry entry : sampleLogSource.entrySet()) { - Object value = entry.getValue(); - if (value instanceof String) { - String stringValue = (String) value; - int length = stringValue.length(); - if (length > maxLength) { - maxLength = length; - longestField = entry.getKey(); - } - } - } - return longestField; - } - - @Override - public String getType() { - return TYPE; - } - - @Override - public boolean validate(Map parameters) { - // LogPatternTool needs to pass index and input as parameter in runtime. 
- return super.validate(parameters) && parameters.containsKey(INDEX_FIELD) && !StringUtils.isBlank(parameters.get(INDEX_FIELD)); - } - - private static int getPositiveInteger(Map params, String paramName) { - int value = getInteger(params, paramName); - checkPositive(value, paramName); - return value; - } - - private static int getInteger(Map params, String paramName) { - int value; - try { - value = Integer.parseInt((String) params.get(paramName)); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - LoggerMessageFormat.format("Invalid value {} for parameter {}, it should be a number", params.get(paramName), paramName) - ); - } - return value; - } - - private static void checkPositive(int value, String paramName) { - if (value <= 0) { - throw new IllegalArgumentException( - LoggerMessageFormat.format("Invalid value {} for parameter {}, it should be positive", value, paramName) - ); - } - } - - public static class Factory extends AbstractRetrieverTool.Factory { - private static LogPatternTool.Factory INSTANCE; - - public static LogPatternTool.Factory getInstance() { - if (INSTANCE != null) { - return INSTANCE; - } - synchronized (LogPatternTool.class) { - if (INSTANCE != null) { - return INSTANCE; - } - INSTANCE = new LogPatternTool.Factory(); - return INSTANCE; - } - } - - @Override - public LogPatternTool create(Map params) { - int docSize = params.containsKey(DOC_SIZE_FIELD) ? getInteger(params, DOC_SIZE_FIELD) : LOG_PATTERN_DEFAULT_DOC_SIZE; - int topNPattern = params.containsKey(TOP_N_PATTERN) ? getInteger(params, TOP_N_PATTERN) : DEFAULT_TOP_N_PATTERN; - int sampleLogSize = params.containsKey(SAMPLE_LOG_SIZE) ? getInteger(params, SAMPLE_LOG_SIZE) : DEFAULT_SAMPLE_LOG_SIZE; - String patternStr = params.containsKey(PATTERN) ? 
(String) params.get(PATTERN) : null; - return LogPatternTool - .builder() - .client(client) - .xContentRegistry(xContentRegistry) - .docSize(docSize) - .topNPattern(topNPattern) - .sampleLogSize(sampleLogSize) - .patternStr(patternStr) - .build(); - } - - @Override - public String getDefaultType() { - return TYPE; - } - - @Override - public String getDefaultVersion() { - return null; - } - - @Override - public String getDefaultDescription() { - return DEFAULT_DESCRIPTION; - } - } -} diff --git a/src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java b/src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java deleted file mode 100644 index 21731005..00000000 --- a/src/test/java/org/opensearch/agent/tools/LogPatternToolTests.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.agent.tools; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Pattern; - -import org.junit.Before; -import org.junit.Test; - -import lombok.SneakyThrows; - -public class LogPatternToolTests { - - public static final String TEST_QUERY_TEXT = "123fsd23134sdfouh"; - private Map params = new HashMap<>(); - - @Before - public void setup() {} - - @Test - @SneakyThrows - public void testCreateTool() { - LogPatternTool tool = LogPatternTool.Factory.getInstance().create(params); - assertEquals(LogPatternTool.LOG_PATTERN_DEFAULT_DOC_SIZE, (int) tool.docSize); - assertEquals(LogPatternTool.DEFAULT_TOP_N_PATTERN, tool.getTopNPattern()); - assertEquals(LogPatternTool.DEFAULT_SAMPLE_LOG_SIZE, tool.getSampleLogSize()); - assertNull(tool.getPattern()); - assertEquals("LogPatternTool", tool.getType()); - assertEquals("LogPatternTool", tool.getName()); - assertEquals(LogPatternTool.DEFAULT_DESCRIPTION, LogPatternTool.Factory.getInstance().getDefaultDescription()); - } - - @Test - 
public void testGetQueryBody() { - LogPatternTool tool = LogPatternTool.Factory.getInstance().create(params); - assertEquals(TEST_QUERY_TEXT, tool.getQueryBody(TEST_QUERY_TEXT)); - } - - @Test - public void testFindLongestField() { - assertEquals("field2", LogPatternTool.findLongestField(Map.of("field1", "123", "field2", "1234", "filed3", 1234))); - } - - @Test - public void testExtractPattern() { - assertEquals("././", LogPatternTool.extractPattern("123.abc/.AB/", null)); - assertEquals("123.c/.AB/", LogPatternTool.extractPattern("123.abc/.AB/", Pattern.compile("ab"))); - assertEquals(".abc/.AB/", LogPatternTool.extractPattern("123.abc/.AB/", Pattern.compile("[0-9]"))); - } -}