From 70b5721fa42e7a1e42e61df707e8c19cd5f48d0c Mon Sep 17 00:00:00 2001
From: qianheng
Date: Fri, 15 Nov 2024 12:51:05 +0800
Subject: [PATCH] Fix alert summary prompt (#958)

* Fix alert summary prompt

Signed-off-by: Heng Qian

* Update CHANGELOG

Signed-off-by: Heng Qian

* Revert "Update CHANGELOG"

Signed-off-by: Heng Qian

* Add prefix and postfix in requestbody

Signed-off-by: Heng Qian

---------

Signed-off-by: Heng Qian
---
 sample-templates/alert-summary-agent-claude-tested.json | 2 +-
 sample-templates/alert-summary-agent-claude-tested.yml  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/sample-templates/alert-summary-agent-claude-tested.json b/sample-templates/alert-summary-agent-claude-tested.json
index ab064f5a4..d025fddc0 100644
--- a/sample-templates/alert-summary-agent-claude-tested.json
+++ b/sample-templates/alert-summary-agent-claude-tested.json
@@ -26,7 +26,7 @@
           "content-type": "application/json"
         },
         "method": "POST",
-        "request_body": "{\"prompt\":\"${parameters.prompt}\", \"max_tokens_to_sample\":${parameters.max_tokens_to_sample}, \"temperature\":${parameters.temperature}, \"anthropic_version\":\"${parameters.anthropic_version}\" }",
+        "request_body": "{\"prompt\":\"\\n\\nHuman: ${parameters.prompt}\\n\\nAssistant:\", \"max_tokens_to_sample\":${parameters.max_tokens_to_sample}, \"temperature\":${parameters.temperature}, \"anthropic_version\":\"${parameters.anthropic_version}\" }",
         "action_type": "predict",
         "url": "https://bedrock-runtime.us-west-2.amazonaws.com/model/anthropic.claude-instant-v1/invoke"
       }
diff --git a/sample-templates/alert-summary-agent-claude-tested.yml b/sample-templates/alert-summary-agent-claude-tested.yml
index ce596e071..f1d03d080 100644
--- a/sample-templates/alert-summary-agent-claude-tested.yml
+++ b/sample-templates/alert-summary-agent-claude-tested.yml
@@ -24,7 +24,7 @@ workflows:
           x-amz-content-sha256: required
           content-type: application/json
         method: POST
-        request_body: '{"prompt":"${parameters.prompt}", "max_tokens_to_sample":${parameters.max_tokens_to_sample},
+        request_body: '{"prompt":"\n\nHuman: ${parameters.prompt}\n\nAssistant:", "max_tokens_to_sample":${parameters.max_tokens_to_sample},
           "temperature":${parameters.temperature}, "anthropic_version":"${parameters.anthropic_version}" }'
         action_type: predict
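
Editorial note, not part of the patch: the Anthropic text-completions format used by the Bedrock anthropic.claude-instant-v1 invoke endpoint expects the prompt to be framed as "\n\nHuman: ...\n\nAssistant:" turns, which is what the added prefix and postfix supply; a bare ${parameters.prompt} is generally not accepted as-is. A rough sketch of the substituted request body follows, written in YAML so the notes can sit inline as comments (the connector actually sends JSON); the prompt text and the numeric values are made-up placeholders, not values taken from the template.

    # Sketch of the request body after parameter substitution (illustrative only)
    prompt: "\n\nHuman: Summarize this alert: CPU usage exceeded 90% on node-1.\n\nAssistant:"
    max_tokens_to_sample: 1000            # placeholder; the template injects ${parameters.max_tokens_to_sample}
    temperature: 0.1                      # placeholder; the template injects ${parameters.temperature}
    anthropic_version: bedrock-2023-05-31 # placeholder; the template injects ${parameters.anthropic_version}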