
Commit 604c622

Refactor groqConversions to accept an optional reasoningFormat parameter.
1 parent e3538e3 commit 604c622

1 file changed: +13, -10 lines

openai-core/src/main/scala/io/cequence/openaiscala/service/adapter/ChatCompletionSettingsConversions.scala

Lines changed: 13 additions & 10 deletions
@@ -110,7 +110,9 @@ object ChatCompletionSettingsConversions {
 
   val o1Preview: SettingsConversion = generic(o1PreviewConversions)
 
-  private lazy val groqConversions = Seq(
+  def groqConversions(
+    reasoningFormat: Option[ReasoningFormat] = None
+  ) = Seq(
     // max tokens
     FieldConversionDef(
       settings =>
@@ -122,15 +124,16 @@ object ChatCompletionSettingsConversions {
       Some(
         "Groq deepseek R1 model doesn't support max_tokens, converting to max_completion_tokens."
       )
+    ),
+    // reasoning format
+    FieldConversionDef(
+      settings => settings.model.endsWith(
+        NonOpenAIModelId.deepseek_r1_distill_llama_70b
+      ) && reasoningFormat.isDefined,
+      _.setReasoningFormat(reasoningFormat.get),
+      Some(
+        s"Setting reasoning format '${reasoningFormat.get}' for Groq deepseek R1 model."
+      )
     )
   )
-
-  def groq(
-    reasoningFormat: Option[ReasoningFormat] = None
-  ): SettingsConversion = {
-    val conversions = generic(groqConversions)
-    reasoningFormat
-      .map(reasoningFormat => conversions.andThen(_.setReasoningFormat(reasoningFormat)))
-      .getOrElse(conversions)
-  }
 }
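
For context, the conversion built from this sequence could be applied the same way the neighbouring o1Preview value is built, i.e. by passing the Seq through generic. A minimal usage sketch under that assumption (the groqWithReasoning name, the accessibility of generic from the call site, and the ReasoningFormat value used here are illustrative, not taken from this commit):

  // Sketch only: mirrors `val o1Preview = generic(o1PreviewConversions)` from the same file.
  // `ReasoningFormat.parsed` is a placeholder; use whichever case the library actually defines.
  val groqWithReasoning: SettingsConversion =
    generic(groqConversions(reasoningFormat = Some(ReasoningFormat.parsed)))

  // Applied to settings targeting the Groq DeepSeek R1 distill model, the conversion
  // renames max_tokens to max_completion_tokens and sets the requested reasoning format.
  // (Assumes all other CreateChatCompletionSettings fields have defaults.)
  val converted = groqWithReasoning(
    CreateChatCompletionSettings(model = NonOpenAIModelId.deepseek_r1_distill_llama_70b)
  )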
