
Commit b163bbe

Grok provider registered, grok-beta model and new examples added
1 parent 02c8f0d

8 files changed: +93 −3 lines changed

openai-core/src/main/scala/io/cequence/openaiscala/domain/NonOpenAIModelId.scala
Lines changed: 5 additions & 0 deletions

@@ -195,4 +195,9 @@ object NonOpenAIModelId {
   val stripedhyena_nous_7b = "togethercomputer/StripedHyena-Nous-7B" // Together AI
   val alpaca_7b = "togethercomputer/alpaca-7b" // Together AI
   val solar_10_7b_instruct_v1_0 = "upstage/SOLAR-10.7B-Instruct-v1.0" // Together AI
+
+  // Grok
+
+  // context 131072
+  val grok_beta = "grok-beta"
 }
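For orientation, a minimal sketch of how the new constant is typically consumed, mirroring the settings used in the new example files further down; the value name `grokSettings` is illustrative only:

import io.cequence.openaiscala.domain.NonOpenAIModelId
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings

// illustrative only: select grok-beta and cap the response length
val grokSettings = CreateChatCompletionSettings(
  model = NonOpenAIModelId.grok_beta,
  temperature = Some(0.1),
  max_tokens = Some(1024)
)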

openai-core/src/main/scala/io/cequence/openaiscala/service/ChatProviderSettings.scala
Lines changed: 1 addition & 0 deletions

@@ -12,4 +12,5 @@ object ChatProviderSettings {
   val octoML = ProviderSettings("https://text.octoai.run/v1/", "OCTOAI_TOKEN")
   val togetherAI =
     ProviderSettings("https://api.together.xyz/v1/", "TOGETHERAI_API_KEY")
+  val grok = ProviderSettings("https://api.x.ai/v1/", "GROK_API_KEY")
 }
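A `ProviderSettings` pairs a base URL with the name of the environment variable holding the API key. As a hedged sketch, any other OpenAI-compatible endpoint could be wired up the same way, assuming `ProviderSettings` is importable from the same `io.cequence.openaiscala.service` package; the URL and env-var name below are made up:

import io.cequence.openaiscala.service.ProviderSettings

// hypothetical endpoint and env-var name, shown only to illustrate the pattern
val myGateway = ProviderSettings(
  "https://my-gateway.example.com/v1/",
  "MY_GATEWAY_API_KEY"
)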

openai-core/src/main/scala/io/cequence/openaiscala/service/adapter/MultiServiceAdapter.scala
Lines changed: 1 addition & 1 deletion (both versions of the changed line render as `}`, so this is a whitespace/newline-only fix)

@@ -36,4 +36,4 @@ private class RandomOrderAdapter[+S <: CloseableService](
   val underlyings: Seq[S]
 ) extends MultiServiceAdapter[S] {
   protected def calcIndex: Int = Random.nextInt(count)
-}
+}

openai-core/src/main/scala/io/cequence/openaiscala/service/adapter/OpenAIServiceAdapters.scala
Lines changed: 2 additions & 1 deletion

@@ -36,7 +36,8 @@ trait OpenAIServiceAdapters[S <: CloseableService] {
     wrapAndDelegate(new RandomOrderAdapter(underlyings))
 
   def parallelTakeFirst(
-    underlyings: S*)(
+    underlyings: S*
+  )(
     implicit materializer: Materializer
   ): S =
     wrapAndDelegate(new ParallelTakeFirstAdapter(underlyings))

openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/ChatCompletionProvider.scala
Lines changed: 8 additions & 0 deletions

@@ -63,6 +63,14 @@ object ChatCompletionProvider {
     m: Materializer
   ): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.togetherAI)
 
+  /**
+   * Requires `GROK_API_KEY`
+   */
+  def grok(
+    implicit ec: ExecutionContext,
+    m: Materializer
+  ): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.grok)
+
   /**
   * Requires `VERTEXAI_API_KEY` and "VERTEXAI_LOCATION"
   */
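A hedged sketch of calling the new factory outside of `ExampleBase`, assuming an Akka `ActorSystem` supplies the implicit `Materializer` and `ExecutionContext` that `grok` expects; the object name `GrokServiceWiring` is made up:

import akka.actor.ActorSystem
import akka.stream.Materializer
import io.cequence.openaiscala.examples.nonopenai.ChatCompletionProvider
import scala.concurrent.ExecutionContext

object GrokServiceWiring extends App {
  // an ActorSystem supplies both the Materializer and the ExecutionContext
  implicit val system: ActorSystem = ActorSystem("grok-example")
  implicit val materializer: Materializer = Materializer(system)
  implicit val ec: ExecutionContext = system.dispatcher

  // GROK_API_KEY must be set in the environment (per the scaladoc above)
  val grokService = ChatCompletionProvider.grok
}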
openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/GrokCreateChatCompletion.scala (new file)
Lines changed: 35 additions & 0 deletions

@@ -0,0 +1,35 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.OpenAIChatCompletionService
+
+import scala.concurrent.Future
+
+/**
+ * Requires `GROK_API_KEY` environment variable to be set.
+ */
+object GrokCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {
+
+  override val service: OpenAIChatCompletionService = ChatCompletionProvider.grok
+
+  private val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId = NonOpenAIModelId.grok_beta
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = modelId,
+          temperature = Some(0.1),
+          max_tokens = Some(1024)
+        )
+      )
+      .map(printMessageContent)
+}
openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/GrokCreateChatCompletionStreamed.scala (new file)
Lines changed: 40 additions & 0 deletions

@@ -0,0 +1,40 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import akka.stream.scaladsl.Sink
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.OpenAIChatCompletionStreamedServiceExtra
+
+import scala.concurrent.Future
+
+// requires `openai-scala-client-stream` as a dependency and `GROK_API_KEY` environment variable to be set
+object GrokCreateChatCompletionStreamed
+    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {
+
+  override val service: OpenAIChatCompletionStreamedServiceExtra = ChatCompletionProvider.grok
+
+  private val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId = NonOpenAIModelId.grok_beta
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletionStreamed(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = modelId,
+          temperature = Some(0.01),
+          max_tokens = Some(512)
+        )
+      )
+      .runWith(
+        Sink.foreach { completion =>
+          val content = completion.choices.headOption.flatMap(_.delta.content)
+          print(content.getOrElse(""))
+        }
+      )
+}
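As a possible variation, not part of the commit, the streamed deltas could be folded into a single string instead of printed as they arrive. This sketch reuses the `service`, `messages`, and `modelId` values from the example above and assumes the implicit `ExecutionContext` that `ExampleBase` provides for the final `map`:

// sketch: accumulate streamed content with Sink.fold instead of Sink.foreach
service
  .createChatCompletionStreamed(
    messages = messages,
    settings = CreateChatCompletionSettings(
      model = modelId,
      temperature = Some(0.01),
      max_tokens = Some(512)
    )
  )
  .runWith(
    Sink.fold("") { (acc, completion) =>
      acc + completion.choices.headOption.flatMap(_.delta.content).getOrElse("")
    }
  )
  .map(fullText => println(fullText))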

openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/VertexAICreateChatCompletionWithOpenAIAdapter.scala
Lines changed: 1 addition & 1 deletion

@@ -13,7 +13,7 @@ object VertexAICreateChatCompletionWithOpenAIAdapter
 
   override val service: OpenAIChatCompletionService = ChatCompletionProvider.vertexAI
 
-  private val model = NonOpenAIModelId.gemini_1_5_pro_001
+  private val model = NonOpenAIModelId.gemini_1_5_pro_002
 
   private val messages = Seq(
     SystemMessage("You are a helpful assistant who makes jokes about Google."),
