
Commit 0029d00

Anthropic bedrock chat completion examples
1 parent bf9f759 commit 0029d00

6 files changed, +160 -3 lines changed
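
These examples assume the Anthropic client module is on the classpath and that Bedrock credentials are exported before running. A minimal sketch of that setup follows; the module name and the environment variable names are taken from the in-code comments below, while the group id and the version placeholder are assumptions to verify against the project README.

// build.sbt (sketch, not part of this commit; group id and version are assumptions)
libraryDependencies += "io.cequence" %% "openai-scala-anthropic-client" % "<version>"

// The Bedrock-backed examples also expect these environment variables to be set:
//   AWS_BEDROCK_ACCESS_KEY, AWS_BEDROCK_SECRET_KEY, AWS_BEDROCK_REGION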
@@ -0,0 +1,36 @@
package io.cequence.openaiscala.examples.nonopenai

import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.StreamedServiceTypes.OpenAIChatCompletionStreamedService

import scala.concurrent.Future

// requires `openai-scala-anthropic-client` as a dependency and the 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', 'AWS_BEDROCK_REGION' environment variables to be set
object AnthropicBedrockCreateChatCompletionStreamedWithOpenAIAdapter
    extends ExampleBase[OpenAIChatCompletionStreamedService] {

  override val service: OpenAIChatCompletionStreamedService =
    ChatCompletionProvider.anthropicBedrock

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
    UserMessage("What is the weather like in Norway?")
  )

  override protected def run: Future[_] = {
    service
      .createChatCompletionStreamed(
        messages = messages,
        settings = CreateChatCompletionSettings(
          model = NonOpenAIModelId.claude_3_5_sonnet_20240620
        )
      )
      .runWith(
        Sink.foreach { response =>
          print(response.choices.headOption.flatMap(_.delta.content).getOrElse(""))
        }
      )
  }
}
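
The streamed adapter example above prints each delta as it arrives. For reference, here is a variant that collects the streamed chunks and prints the assembled answer once the stream completes; a minimal sketch that reuses the service, messages and settings defined above and assumes the implicit materializer and execution context provided by ExampleBase.

  // Sketch: buffer all streamed chunks, then assemble and print the full text once.
  val collectedText: Future[String] =
    service
      .createChatCompletionStreamed(
        messages = messages,
        settings = CreateChatCompletionSettings(
          model = NonOpenAIModelId.claude_3_5_sonnet_20240620
        )
      )
      .runWith(Sink.seq)
      .map(chunks => chunks.flatMap(_.choices.headOption.flatMap(_.delta.content)).mkString)

  collectedText.foreach(println)

Sink.foreach keeps the memory footprint constant, while Sink.seq buffers every chunk; the buffered variant is only convenient when the complete text is needed in one piece.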
@@ -0,0 +1,34 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.Future

// requires `openai-scala-anthropic-client` as a dependency and the 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', 'AWS_BEDROCK_REGION' environment variables to be set
object AnthropicBedrockCreateChatCompletionWithOpenAIAdapter
    extends ExampleBase[OpenAIChatCompletionService] {

  override val service: OpenAIChatCompletionService = ChatCompletionProvider.anthropicBedrock

  private val messages = Seq(
    SystemMessage("You are a drunk assistant!"),
    UserMessage("What is the weather like in Norway?")
  )

  private val modelId =
    // using the 'us.' prefix because of cross-region inference (enabled only in the US)
    "us." + NonOpenAIModelId.bedrock_claude_3_5_haiku_20241022_v1_0

  override protected def run: Future[_] =
    service
      .createChatCompletion(
        messages = messages,
        settings = CreateChatCompletionSettings(modelId)
      )
      .map { content =>
        println(content.choices.headOption.map(_.message.content).getOrElse("N/A"))
      }
}
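
The 'us.' prefix in the model id above is what routes the request through Bedrock's cross-region inference profile. A hypothetical helper that makes the prefix explicit; the function name is illustrative and not part of the library.

  // Hypothetical helper: prepend a Bedrock cross-region inference prefix to a base model id.
  def crossRegionModelId(baseModelId: String, regionPrefix: String = "us"): String =
    s"$regionPrefix.$baseModelId"

  // e.g. crossRegionModelId(NonOpenAIModelId.bedrock_claude_3_5_haiku_20241022_v1_0)
  //      yields the same value as "us." + NonOpenAIModelId.bedrock_claude_3_5_haiku_20241022_v1_0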
@@ -0,0 +1,47 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock
import io.cequence.openaiscala.anthropic.domain.Content.ContentBlockBase
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
import io.cequence.openaiscala.domain.NonOpenAIModelId
import io.cequence.openaiscala.examples.ExampleBase

import scala.concurrent.Future

// requires `openai-scala-anthropic-client` as a dependency and the 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', 'AWS_BEDROCK_REGION' environment variables to be set
object AnthropicBedrockCreateMessage extends ExampleBase[AnthropicService] {

  override protected val service: AnthropicService = AnthropicServiceFactory.forBedrock()

  private val messages: Seq[Message] = Seq(
    SystemMessage("You are a drunk assistant!"),
    UserMessage("What is the weather like in Norway?")
  )

  private val modelId =
    // using the 'us.' prefix because of cross-region inference (enabled only in the US)
    "us." + NonOpenAIModelId.bedrock_claude_3_5_sonnet_20241022_v2_0

  override protected def run: Future[_] =
    service
      .createMessage(
        messages,
        settings = AnthropicCreateMessageSettings(
          model = modelId,
          max_tokens = 4096,
          temperature = Some(1.0)
        )
      )
      .map(printMessageContent)

  private def printMessageContent(response: CreateMessageResponse) = {
    val text =
      response.content.blocks
        .collect { case ContentBlockBase(TextBlock(text), _) => text }
        .mkString(" ")
    println(text)
  }
}
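
AnthropicServiceFactory.forBedrock() relies on the three environment variables named in the comment above. Below is a small pre-flight check one could add to such an example to fail fast with a readable message; this is a sketch, not something the factory requires.

  // Sketch: verify the Bedrock credentials are present before constructing the service.
  def requireBedrockEnv(): Unit = {
    val required = Seq("AWS_BEDROCK_ACCESS_KEY", "AWS_BEDROCK_SECRET_KEY", "AWS_BEDROCK_REGION")
    val missing  = required.filterNot(sys.env.contains)
    require(missing.isEmpty, s"Missing environment variables: ${missing.mkString(", ")}")
  }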
@@ -0,0 +1,39 @@
package io.cequence.openaiscala.examples.nonopenai

import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
import io.cequence.openaiscala.domain.NonOpenAIModelId
import io.cequence.openaiscala.examples.ExampleBase

import scala.concurrent.Future

// requires `openai-scala-anthropic-client` as a dependency and the 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', 'AWS_BEDROCK_REGION' environment variables to be set
object AnthropicBedrockCreateMessageStreamed extends ExampleBase[AnthropicService] {

  override protected val service: AnthropicService = AnthropicServiceFactory.forBedrock()

  val messages: Seq[Message] = Seq(
    SystemMessage("You are a helpful assistant!"),
    UserMessage("Start with the letter S followed by a quick story about Norway and finish with the letter E.")
  )

  private val modelId = "us." + NonOpenAIModelId.bedrock_claude_3_5_sonnet_20241022_v2_0

  override protected def run: Future[_] =
    service
      .createMessageStreamed(
        messages,
        settings = AnthropicCreateMessageSettings(
          model = modelId,
          max_tokens = 4096
        )
      )
      .runWith(
        Sink.foreach { response =>
          print(response.delta.text)
        }
      )
}

openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateCachedMessage.scala

Lines changed: 2 additions & 2 deletions
@@ -2,11 +2,11 @@ package io.cequence.openaiscala.examples.nonopenai
 
 import io.cequence.openaiscala.anthropic.domain.CacheControl.Ephemeral
 import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock
-import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlockBase, SingleString}
+import io.cequence.openaiscala.anthropic.domain.Content.ContentBlockBase
 import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
 import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
 import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
-import io.cequence.openaiscala.anthropic.domain.{Content, Message}
+import io.cequence.openaiscala.anthropic.domain.Message
 import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
 import io.cequence.openaiscala.domain.NonOpenAIModelId
 import io.cequence.openaiscala.examples.ExampleBase

openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/VertexAICreateChatCompletionStreamedWithOpenAIAdapter.scala

Lines changed: 2 additions & 1 deletion
@@ -15,7 +15,8 @@ object VertexAICreateChatCompletionStreamedWithOpenAIAdapter
 
   override val service: OpenAIChatCompletionStreamedService = ChatCompletionProvider.vertexAI
 
-  private val model = NonOpenAIModelId.gemini_1_5_flash_001
+  // 2024-12-18: works only with us-central1
+  private val model = NonOpenAIModelId.gemini_2_0_flash_exp
 
   private val messages = Seq(
     SystemMessage("You are a helpful assistant who makes jokes about Google. Use markdown"),
