2 files changed: +34 −1 lines changed

openai-core/src/main/scala/io/cequence/openaiscala/service/adapter
openai-examples/src/main/scala/io/cequence/openaiscala/examples

@@ -90,7 +90,7 @@ object ChatCompletionSettingsConversions {
       settings.response_format_type.isDefined && settings.response_format_type.get != ChatCompletionResponseFormatType.text,
       _.copy(response_format_type = None),
       Some(
-        "O1 models don't support json object/schema response format, converting to None."
+        "O1 (preview) models don't support json object/schema response format, converting to None."
       ),
       warning = true
     )
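
For context, the arguments in the hunk above describe a field-level conversion rule: a predicate over the incoming settings, a copy-based transform, an optional log message, and a warning flag. Below is a minimal Scala sketch of how such a rule could be modelled; the FieldConversionDef name, its field names, and the rule value are assumptions for illustration, not necessarily the library's actual definitions.

import io.cequence.openaiscala.domain.settings.{
  ChatCompletionResponseFormatType,
  CreateChatCompletionSettings
}

// Hypothetical shape of a single settings-conversion rule (names assumed).
case class FieldConversionDef(
  doConversion: CreateChatCompletionSettings => Boolean,                 // when the rule applies
  convert: CreateChatCompletionSettings => CreateChatCompletionSettings, // how the settings are rewritten
  loggingMessage: Option[String],                                        // message emitted when the rule fires
  warning: Boolean = false                                               // log as a warning rather than info
)

// The changed hunk corresponds to a rule along these lines:
val dropJsonResponseFormat = FieldConversionDef(
  settings =>
    settings.response_format_type.isDefined &&
      settings.response_format_type.get != ChatCompletionResponseFormatType.text,
  _.copy(response_format_type = None),
  Some("O1 (preview) models don't support json object/schema response format, converting to None."),
  warning = true
)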
@@ -0,0 +1,33 @@
+package io.cequence.openaiscala.examples
+
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.{
+  ChatCompletionResponseFormatType,
+  CreateChatCompletionSettings
+}
+
+import scala.concurrent.Future
+
+object CreateChatCompletionWithO1 extends Example {
+
+  private val messages = Seq(
+    // system message still works for O1 models but moving forward DeveloperMessage should be used instead
+    SystemMessage("You are a helpful weather assistant who likes to make jokes."),
+    UserMessage("What is the weather like in Norway per major cities? Answer in json format.")
+  )
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = ModelId.o1,
+          temperature = Some(0.1),
+          response_format_type = Some(ChatCompletionResponseFormatType.json_object),
+          max_tokens = Some(4000)
+        )
+      )
+      .map { content =>
+        printMessageContent(content)
+      }
+}
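
The comment in the new example notes that a developer message should replace the system message for newer reasoning models. A minimal variant of the same messages sequence, assuming the library exposes a DeveloperMessage case class used the same way as SystemMessage (treat the name as an assumption):

  // Sketch only: DeveloperMessage is assumed to exist in io.cequence.openaiscala.domain
  // and to take a single content string, like SystemMessage.
  private val messagesWithDeveloperRole = Seq(
    DeveloperMessage("You are a helpful weather assistant who likes to make jokes."),
    UserMessage("What is the weather like in Norway per major cities? Answer in json format.")
  )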