diff --git a/README.md b/README.md
index 273d185..6198184 100644
--- a/README.md
+++ b/README.md
@@ -5,16 +5,17 @@
[![codecov](https://codecov.io/gh/liushuangls/go-anthropic/graph/badge.svg?token=O6JSAOZORX)](https://codecov.io/gh/liushuangls/go-anthropic)
[![Sanity check](https://github.com/liushuangls/go-anthropic/actions/workflows/pr.yml/badge.svg)](https://github.com/liushuangls/go-anthropic/actions/workflows/pr.yml)
-Anthropic Claude API wrapper for Go (Unofficial). Support:
+Anthropic Claude API wrapper for Go (Unofficial).
+This package supports:
- Completions
- Streaming Completions
- Messages
- Streaming Messages
-- Vision
-- Tool use
+- Message Batching
+- Vision and PDFs
+- Tool use (with [computer use](https://docs.anthropic.com/en/docs/build-with-claude/computer-use))
- Prompt Caching
-- PDF
- Token Counting
## Installation
@@ -163,7 +164,6 @@ func main() {
-
Messages Tool use example
```go
@@ -242,7 +242,6 @@ func main() {
fmt.Printf("Response: %+v\n", resp)
}
```
-
@@ -302,17 +301,13 @@ func main() {
fmt.Println(resp.Content[0].GetText())
}
```
-
VertexAI example
-
-
If you are using a Google Credentials file, you can use the following code to create a client:
```go
-
package main
import (
@@ -348,7 +343,7 @@ func main() {
fmt.Println(token.AccessToken)
client := anthropic.NewClient(token.AccessToken, anthropic.WithVertexAI("", ""))
-
+
resp, err := client.CreateMessagesStream(context.Background(), anthropic.MessagesStreamRequest{
MessagesRequest: anthropic.MessagesRequest{
Model: anthropic.ModelClaude3Haiku20240307,
@@ -372,11 +367,164 @@ func main() {
}
fmt.Println(resp.Content[0].GetText())
}
+```
+
+Message Batching example
+
+doc: https://docs.anthropic.com/en/docs/build-with-claude/message-batches
+
+```go
+package main
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "os"
+
+ "github.com/liushuangls/go-anthropic/v2"
+)
+
+func main() {
+ client := anthropic.NewClient(
+ "your anthropic api key",
+ anthropic.WithBetaVersion(anthropic.BetaMessageBatches20240924),
+ )
+
+	ctx := context.Background()
+
+	resp, err := client.CreateBatch(ctx,
+ anthropic.BatchRequest{
+ Requests: []anthropic.InnerRequests{
+ {
+					CustomId: "your-custom-id-here",
+ Params: anthropic.MessagesRequest{
+ Model: anthropic.ModelClaude3Haiku20240307,
+ MultiSystem: anthropic.NewMultiSystemMessages(
+ "you are an assistant",
+ "you are snarky",
+ ),
+ MaxTokens: 10,
+ Messages: []anthropic.Message{
+ anthropic.NewUserTextMessage("What is your name?"),
+ anthropic.NewAssistantTextMessage("My name is Claude."),
+ anthropic.NewUserTextMessage("What is your favorite color?"),
+ },
+ },
+ },
+ },
+ },
+ )
+ if err != nil {
+ var e *anthropic.APIError
+ if errors.As(err, &e) {
+ fmt.Printf("Messages error, type: %s, message: %s", e.Type, e.Message)
+ } else {
+ fmt.Printf("Messages error: %v\n", err)
+ }
+ return
+ }
+ fmt.Println(resp)
+
+ retrieveResp, err := client.RetrieveBatch(ctx, resp.Id)
+ if err != nil {
+ var e *anthropic.APIError
+ if errors.As(err, &e) {
+ fmt.Printf("Messages error, type: %s, message: %s", e.Type, e.Message)
+ } else {
+ fmt.Printf("Messages error: %v\n", err)
+ }
+ return
+ }
+ fmt.Println(retrieveResp)
+
+ resultResp, err := client.RetrieveBatchResults(ctx, "batch_id_your-batch-here")
+ if err != nil {
+ var e *anthropic.APIError
+ if errors.As(err, &e) {
+ fmt.Printf("Messages error, type: %s, message: %s", e.Type, e.Message)
+ } else {
+ fmt.Printf("Messages error: %v\n", err)
+ }
+ return
+ }
+ fmt.Println(resultResp)
+
+ listResp, err := client.ListBatches(ctx, anthropic.ListBatchesRequest{})
+ if err != nil {
+ var e *anthropic.APIError
+ if errors.As(err, &e) {
+ fmt.Printf("Messages error, type: %s, message: %s", e.Type, e.Message)
+ } else {
+ fmt.Printf("Messages error: %v\n", err)
+ }
+ return
+ }
+ fmt.Println(listResp)
+
+ cancelResp, err := client.CancelBatch(ctx, "batch_id_your-batch-here")
+	if err != nil {
+		fmt.Printf("CancelBatch error: %v\n", err)
+		return
+	}
+ fmt.Println(cancelResp)
```
+
+Token Counting example
+
+doc: https://docs.anthropic.com/en/docs/build-with-claude/token-counting
+
+```go
+// TODO: add example!
+```
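+
+A minimal sketch in the meantime (it assumes the client exposes a `CountTokens` method that accepts a `MessagesRequest`; check the package documentation for the exact signature and response fields):
+
+```go
+package main
+
+import (
+	"context"
+	"errors"
+	"fmt"
+
+	"github.com/liushuangls/go-anthropic/v2"
+)
+
+func main() {
+	client := anthropic.NewClient(
+		"your anthropic api key",
+		anthropic.WithBetaVersion(anthropic.BetaTokenCounting20241101),
+	)
+
+	// Count the tokens of a request without running the model (assumed method name).
+	countResp, err := client.CountTokens(context.Background(), anthropic.MessagesRequest{
+		Model: anthropic.ModelClaude3Haiku20240307,
+		Messages: []anthropic.Message{
+			anthropic.NewUserTextMessage("What is your name?"),
+		},
+	})
+	if err != nil {
+		var e *anthropic.APIError
+		if errors.As(err, &e) {
+			fmt.Printf("CountTokens error, type: %s, message: %s", e.Type, e.Message)
+		} else {
+			fmt.Printf("CountTokens error: %v\n", err)
+		}
+		return
+	}
+	fmt.Printf("%+v\n", countResp)
+}
+```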
+
+### Beta features
+Anthropic provides several beta features that can be enabled using the following beta version identifiers:
+
+Beta Version Identifier | Code Constant | Description
+-----------------------------------|---------------------------------|---------------------------------
+`tools-2024-04-04` | `BetaTools20240404` | Initial tools beta
+`tools-2024-05-16` | `BetaTools20240516` | Updated tools beta
+`prompt-caching-2024-07-31` | `BetaPromptCaching20240731` | Prompt caching beta
+`message-batches-2024-09-24` | `BetaMessageBatches20240924` | Message batching beta
+`token-counting-2024-11-01` | `BetaTokenCounting20241101` | Token counting beta
+`max-tokens-3-5-sonnet-2024-07-15` | `BetaMaxTokens35Sonnet20240715` | Max tokens beta for Sonnet model
+`computer-use-2024-10-22` | `BetaComputerUse20241022` | Computer use beta
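+
+For example, to opt a client into one of these betas, pass the corresponding constant to `WithBetaVersion` when constructing the client, as in the Message Batching example above:
+
+```go
+package main
+
+import "github.com/liushuangls/go-anthropic/v2"
+
+func main() {
+	// Enable the prompt caching beta; any constant from the table above works the same way.
+	client := anthropic.NewClient(
+		"your anthropic api key",
+		anthropic.WithBetaVersion(anthropic.BetaPromptCaching20240731),
+	)
+	_ = client // use the client as in the other examples
+}
+```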
+
+### Supported models
+The following models are supported by go-anthropic.
+These models are also available on Google's Vertex AI platform.
+
+Code Constant                  | Model String
+-------------------------------|------------------------------
+ModelClaude2Dot0 | "claude-2.0"
+ModelClaude2Dot1 | "claude-2.1"
+ModelClaude3Opus20240229 | "claude-3-opus-20240229"
+ModelClaude3Sonnet20240229 | "claude-3-sonnet-20240229"
+ModelClaude3Dot5Sonnet20240620 | "claude-3-5-sonnet-20240620"
+ModelClaude3Dot5Sonnet20241022 | "claude-3-5-sonnet-20241022"
+ModelClaude3Dot5SonnetLatest | "claude-3-5-sonnet-latest"
+ModelClaude3Haiku20240307 | "claude-3-haiku-20240307"
+ModelClaude3Dot5HaikuLatest | "claude-3-5-haiku-latest"
+ModelClaude3Dot5Haiku20241022 | "claude-3-5-haiku-20241022"
+
+### Other Enums
+Two additional enums are exported, as shown in the sketch below:
+- `RoleUser` = "user": Input role type for user messages
+- `RoleAssistant` = "assistant": Input role type for assistant/Claude messages
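+
+A minimal sketch of where these constants appear, using a model string from the table above (it assumes `Message` and `MessagesResponse` expose a `Role` field, which is how the constants are typically consumed):
+
+```go
+package main
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/liushuangls/go-anthropic/v2"
+)
+
+func main() {
+	// NewUserTextMessage builds a Message with Role set to RoleUser.
+	msg := anthropic.NewUserTextMessage("What is your favorite color?")
+	fmt.Println(msg.Role == anthropic.RoleUser) // true
+
+	client := anthropic.NewClient("your anthropic api key")
+	resp, err := client.CreateMessages(context.Background(), anthropic.MessagesRequest{
+		Model:     anthropic.ModelClaude3Dot5HaikuLatest,
+		MaxTokens: 100,
+		Messages:  []anthropic.Message{msg},
+	})
+	if err != nil {
+		fmt.Printf("Messages error: %v\n", err)
+		return
+	}
+
+	// Replies come back with the assistant role.
+	fmt.Println(resp.Role == anthropic.RoleAssistant, resp.Content[0].GetText())
+}
+```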
+
## Acknowledgments
-The following project had particular influence on go-anthropic is design.
+The following project had particular influence on go-anthropic's design.
- [sashabaranov/go-openai](https://github.com/sashabaranov/go-openai)
+
+Additionally, we thank Anthropic for providing the API and documentation.
+
+## License
+go-anthropic is licensed under the Apache License, Version 2.0. See [LICENSE](LICENSE) for the full license text.