Skip to content

Commit 5b326ae

Browse files
committed
Revert "install scalafmt sbt plugin and reformat all files"
This reverts commit a408426.
1 parent a408426 commit 5b326ae

File tree

53 files changed

+1228
-1666
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

53 files changed

+1228
-1666
lines changed

.scalafmt.conf

Lines changed: 0 additions & 2 deletions
This file was deleted.

openai-client-stream/src/main/scala/io/cequence/openaiscala/service/OpenAIServiceStreamedExtra.scala

Lines changed: 36 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -3,73 +3,51 @@ package io.cequence.openaiscala.service
33
import akka.NotUsed
44
import akka.stream.scaladsl.Source
55
import io.cequence.openaiscala.domain.MessageSpec
6-
import io.cequence.openaiscala.domain.response.{
7-
ChatCompletionChunkResponse,
8-
FineTuneEvent,
9-
TextCompletionResponse
10-
}
11-
import io.cequence.openaiscala.domain.settings.{
12-
CreateChatCompletionSettings,
13-
CreateCompletionSettings
14-
}
6+
import io.cequence.openaiscala.domain.response.{ChatCompletionChunkResponse, FineTuneEvent, TextCompletionResponse}
7+
import io.cequence.openaiscala.domain.settings.{CreateChatCompletionSettings, CreateCompletionSettings}
8+
159

1610
trait OpenAIServiceStreamedExtra extends OpenAIServiceConsts {
1711

18-
/** Creates a completion for the provided prompt and parameters with streamed
19-
* results.
20-
*
21-
* @param prompt
22-
* The prompt(s) to generate completions for, encoded as a string, array of
23-
* strings, array of tokens, or array of token arrays. Note that
24-
* <|endoftext|> is the document separator that the model sees during
25-
* training, so if a prompt is not specified the model will generate as if
26-
* from the beginning of a new document.
27-
* @param settings
28-
* @return
29-
* text completion response as a stream (source)
30-
*
31-
* @see
32-
* <a
33-
* href="https://beta.openai.com/docs/api-reference/completions/create">OpenAI
34-
* Doc</a>
35-
*/
12+
/**
13+
* Creates a completion for the provided prompt and parameters with streamed results.
14+
*
15+
* @param prompt The prompt(s) to generate completions for, encoded as a string, array of strings, array of tokens, or array of token arrays.
16+
Note that <|endoftext|> is the document separator that the model sees during training,
17+
so if a prompt is not specified the model will generate as if from the beginning of a new document.
18+
* @param settings
19+
* @return text completion response as a stream (source)
20+
*
21+
* @see <a href="https://beta.openai.com/docs/api-reference/completions/create">OpenAI Doc</a>
22+
*/
3623
def createCompletionStreamed(
37-
prompt: String,
38-
settings: CreateCompletionSettings = DefaultSettings.CreateCompletion
24+
prompt: String,
25+
settings: CreateCompletionSettings = DefaultSettings.CreateCompletion
3926
): Source[TextCompletionResponse, NotUsed]
4027

41-
/** Creates a completion for the chat message(s) with streamed results.
42-
*
43-
* @param messages
44-
* The messages to generate chat completions.
45-
* @param settings
46-
* @return
47-
* chat completion response
48-
*
49-
* @see
50-
* <a
51-
* href="https://platform.openai.com/docs/api-reference/chat/create">OpenAI
52-
* Doc</a>
53-
*/
28+
/**
29+
* Creates a completion for the chat message(s) with streamed results.
30+
*
31+
* @param messages The messages to generate chat completions.
32+
* @param settings
33+
* @return chat completion response
34+
*
35+
* @see <a href="https://platform.openai.com/docs/api-reference/chat/create">OpenAI Doc</a>
36+
*/
5437
def createChatCompletionStreamed(
55-
messages: Seq[MessageSpec],
56-
settings: CreateChatCompletionSettings =
57-
DefaultSettings.CreateChatCompletion
38+
messages: Seq[MessageSpec],
39+
settings: CreateChatCompletionSettings = DefaultSettings.CreateChatCompletion
5840
): Source[ChatCompletionChunkResponse, NotUsed]
5941

60-
/** Get fine-grained status updates for a fine-tune job with streamed results.
61-
*
62-
* @param fineTuneId
63-
* The ID of the fine-tune job to get events for.
64-
* @return
65-
* fine tune events or None if not found as a stream (source)
66-
*
67-
* @see
68-
* <a
69-
* href="https://beta.openai.com/docs/api-reference/fine-tunes/events">OpenAI
70-
* Doc</a>
71-
*/
42+
/**
43+
* Get fine-grained status updates for a fine-tune job with streamed results.
44+
*
45+
* @param fineTuneId The ID of the fine-tune job to get events for.
46+
* @return fine tune events or None if not found as a stream (source)
47+
*
48+
* @see <a href="https://beta.openai.com/docs/api-reference/fine-tunes/events">OpenAI Doc</a>
49+
*/
7250
def listFineTuneEventsStreamed(
73-
fineTuneId: String
51+
fineTuneId: String
7452
): Source[FineTuneEvent, NotUsed]
7553
}

openai-client-stream/src/main/scala/io/cequence/openaiscala/service/OpenAIServiceStreamedImpl.scala

Lines changed: 40 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -14,62 +14,53 @@ import play.api.libs.json.JsValue
1414

1515
import scala.concurrent.ExecutionContext
1616

17-
/** Private impl. class of [[OpenAIService]].
18-
*
19-
* @param apiKey
20-
* @param orgId
21-
* @param ec
22-
* @param materializer
23-
*
24-
* @since Jan
25-
* 2023
26-
*/
27-
private trait OpenAIServiceStreamedExtraImpl
28-
extends OpenAIServiceStreamedExtra
29-
with WSStreamRequestHelper {
17+
/**
18+
* Private impl. class of [[OpenAIService]].
19+
*
20+
* @param apiKey
21+
* @param orgId
22+
* @param ec
23+
* @param materializer
24+
*
25+
* @since Jan 2023
26+
*/
27+
private trait OpenAIServiceStreamedExtraImpl extends OpenAIServiceStreamedExtra with WSStreamRequestHelper {
3028
this: OpenAIServiceImpl =>
3129

3230
override def createCompletionStreamed(
33-
prompt: String,
34-
settings: CreateCompletionSettings
31+
prompt: String,
32+
settings: CreateCompletionSettings
3533
): Source[TextCompletionResponse, NotUsed] =
3634
execJsonStreamAux(
3735
Command.completions,
3836
"POST",
39-
bodyParams =
40-
createBodyParamsForCompletion(prompt, settings, stream = true)
37+
bodyParams = createBodyParamsForCompletion(prompt, settings, stream = true)
4138
).map { (json: JsValue) =>
42-
(json \ "error").toOption
43-
.map { error =>
44-
throw new OpenAIScalaClientException(error.toString())
45-
}
46-
.getOrElse(
47-
json.asSafe[TextCompletionResponse]
48-
)
39+
(json \ "error").toOption.map { error =>
40+
throw new OpenAIScalaClientException(error.toString())
41+
}.getOrElse(
42+
json.asSafe[TextCompletionResponse]
43+
)
4944
}
5045

5146
override def createChatCompletionStreamed(
52-
messages: Seq[MessageSpec],
53-
settings: CreateChatCompletionSettings =
54-
DefaultSettings.CreateChatCompletion
47+
messages: Seq[MessageSpec],
48+
settings: CreateChatCompletionSettings = DefaultSettings.CreateChatCompletion
5549
): Source[ChatCompletionChunkResponse, NotUsed] =
5650
execJsonStreamAux(
5751
Command.chat_completions,
5852
"POST",
59-
bodyParams =
60-
createBodyParamsForChatCompletion(messages, settings, stream = true)
53+
bodyParams = createBodyParamsForChatCompletion(messages, settings, stream = true)
6154
).map { (json: JsValue) =>
62-
(json \ "error").toOption
63-
.map { error =>
64-
throw new OpenAIScalaClientException(error.toString())
65-
}
66-
.getOrElse(
67-
json.asSafe[ChatCompletionChunkResponse]
68-
)
55+
(json \ "error").toOption.map { error =>
56+
throw new OpenAIScalaClientException(error.toString())
57+
}.getOrElse(
58+
json.asSafe[ChatCompletionChunkResponse]
59+
)
6960
}
7061

7162
override def listFineTuneEventsStreamed(
72-
fineTuneId: String
63+
fineTuneId: String
7364
): Source[FineTuneEvent, NotUsed] =
7465
execJsonStreamAux(
7566
Command.fine_tunes,
@@ -79,29 +70,21 @@ private trait OpenAIServiceStreamedExtraImpl
7970
Tag.stream -> Some(true)
8071
)
8172
).map { json =>
82-
(json \ "error").toOption
83-
.map { error =>
84-
throw new OpenAIScalaClientException(error.toString())
85-
}
86-
.getOrElse(
87-
json.asSafe[FineTuneEvent]
88-
)
73+
(json \ "error").toOption.map { error =>
74+
throw new OpenAIScalaClientException(error.toString())
75+
}.getOrElse(
76+
json.asSafe[FineTuneEvent]
77+
)
8978
}
9079
}
9180

92-
object OpenAIServiceStreamedFactory
93-
extends OpenAIServiceFactoryHelper[
94-
OpenAIService with OpenAIServiceStreamedExtra
95-
] {
81+
object OpenAIServiceStreamedFactory extends OpenAIServiceFactoryHelper[OpenAIService with OpenAIServiceStreamedExtra] {
9682

9783
override def apply(
98-
apiKey: String,
99-
orgId: Option[String] = None,
100-
timeouts: Option[Timeouts] = None
101-
)(implicit
102-
ec: ExecutionContext,
103-
materializer: Materializer
84+
apiKey: String,
85+
orgId: Option[String] = None,
86+
timeouts: Option[Timeouts] = None)(
87+
implicit ec: ExecutionContext, materializer: Materializer
10488
): OpenAIService with OpenAIServiceStreamedExtra =
105-
new OpenAIServiceImpl(apiKey, orgId, timeouts)
106-
with OpenAIServiceStreamedExtraImpl
107-
}
89+
new OpenAIServiceImpl(apiKey, orgId, timeouts) with OpenAIServiceStreamedExtraImpl
90+
}

openai-client-stream/src/main/scala/io/cequence/openaiscala/service/ws/WSStreamRequestHelper.scala

Lines changed: 29 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -8,22 +8,18 @@ import akka.stream.scaladsl.Framing.FramingException
88
import akka.stream.scaladsl.{Flow, Framing, Source}
99
import akka.util.ByteString
1010
import com.fasterxml.jackson.core.JsonParseException
11-
import io.cequence.openaiscala.{
12-
OpenAIScalaClientException,
13-
OpenAIScalaClientTimeoutException,
14-
OpenAIScalaClientUnknownHostException
15-
}
11+
import io.cequence.openaiscala.{OpenAIScalaClientException, OpenAIScalaClientTimeoutException, OpenAIScalaClientUnknownHostException}
1612
import play.api.libs.json.{JsObject, JsString, JsValue, Json}
1713
import play.api.libs.ws.JsonBodyWritables._
1814

1915
import java.net.UnknownHostException
2016
import java.util.concurrent.TimeoutException
2117

22-
/** Stream request support specifically tailored for OpenAI API.
23-
*
24-
* @since Feb
25-
* 2023
26-
*/
18+
/**
19+
* Stream request support specifically tailored for OpenAI API.
20+
*
21+
* @since Feb 2023
22+
*/
2723
trait WSStreamRequestHelper {
2824
this: WSRequestHelper =>
2925

@@ -35,18 +31,16 @@ trait WSStreamRequestHelper {
3531
private implicit val jsonMarshaller: Unmarshaller[ByteString, JsValue] =
3632
Unmarshaller.strict[ByteString, JsValue] { byteString =>
3733
val data = byteString.utf8String.stripPrefix(itemPrefix)
38-
if (data.equals(endOfStreamToken)) JsString(endOfStreamToken)
39-
else Json.parse(data)
34+
if (data.equals(endOfStreamToken)) JsString(endOfStreamToken) else Json.parse(data)
4035
}
4136

4237
protected def execJsonStreamAux(
43-
endPoint: PEP,
44-
method: String,
45-
endPointParam: Option[String] = None,
46-
params: Seq[(PT, Option[Any])] = Nil,
47-
bodyParams: Seq[(PT, Option[JsValue])] = Nil
48-
)(implicit
49-
materializer: Materializer
38+
endPoint: PEP,
39+
method: String,
40+
endPointParam: Option[String] = None,
41+
params: Seq[(PT, Option[Any])] = Nil,
42+
bodyParams: Seq[(PT, Option[JsValue])] = Nil)(
43+
implicit materializer: Materializer
5044
): Source[JsValue, NotUsed] = {
5145
val source = execStreamRequestAux[JsValue](
5246
endPoint,
@@ -56,14 +50,8 @@ trait WSStreamRequestHelper {
5650
bodyParams,
5751
Framing.delimiter(ByteString("\n\n"), 1000, allowTruncation = true),
5852
{
59-
case e: JsonParseException =>
60-
throw new OpenAIScalaClientException(
61-
s"$serviceName.$endPoint: 'Response is not a JSON. ${e.getMessage}."
62-
)
63-
case e: FramingException =>
64-
throw new OpenAIScalaClientException(
65-
s"$serviceName.$endPoint: 'Response is not a JSON. ${e.getMessage}."
66-
)
53+
case e: JsonParseException => throw new OpenAIScalaClientException(s"$serviceName.$endPoint: 'Response is not a JSON. ${e.getMessage}.")
54+
case e: FramingException => throw new OpenAIScalaClientException(s"$serviceName.$endPoint: 'Response is not a JSON. ${e.getMessage}.")
6755
}
6856
)
6957

@@ -72,41 +60,32 @@ trait WSStreamRequestHelper {
7260
}
7361

7462
protected def execStreamRequestAux[T](
75-
endPoint: PEP,
76-
method: String,
77-
endPointParam: Option[String],
78-
params: Seq[(PT, Option[Any])],
79-
bodyParams: Seq[(PT, Option[JsValue])],
80-
framing: Flow[ByteString, ByteString, NotUsed],
81-
recoverBlock: PartialFunction[Throwable, T]
82-
)(implicit
83-
um: Unmarshaller[ByteString, T],
84-
materializer: Materializer
63+
endPoint: PEP,
64+
method: String,
65+
endPointParam: Option[String],
66+
params: Seq[(PT, Option[Any])],
67+
bodyParams: Seq[(PT, Option[JsValue])],
68+
framing: Flow[ByteString, ByteString, NotUsed],
69+
recoverBlock: PartialFunction[Throwable, T])(
70+
implicit um: Unmarshaller[ByteString, T], materializer: Materializer
8571
): Source[T, NotUsed] = {
8672
val request = getWSRequestOptional(Some(endPoint), endPointParam, params)
8773

8874
val requestWithBody = if (bodyParams.nonEmpty) {
89-
val bodyParamsX = bodyParams.collect { case (fieldName, Some(jsValue)) =>
90-
(fieldName.toString, jsValue)
91-
}
75+
val bodyParamsX = bodyParams.collect { case (fieldName, Some(jsValue)) => (fieldName.toString, jsValue) }
9276
request.withBody(JsObject(bodyParamsX))
9377
} else
9478
request
9579

9680
val source =
9781
requestWithBody.withMethod(method).stream().map { response =>
98-
response.bodyAsSource
82+
response
83+
.bodyAsSource
9984
.via(framing)
100-
.mapAsync(1)(bytes => Unmarshal(bytes).to[T]) // unmarshal one by one
85+
.mapAsync(1)(bytes => Unmarshal(bytes).to[T]) // unmarshal one by one
10186
.recover {
102-
case e: TimeoutException =>
103-
throw new OpenAIScalaClientTimeoutException(
104-
s"$serviceName.$endPoint timed out: ${e.getMessage}."
105-
)
106-
case e: UnknownHostException =>
107-
throw new OpenAIScalaClientUnknownHostException(
108-
s"$serviceName.$endPoint cannot resolve a host name: ${e.getMessage}."
109-
)
87+
case e: TimeoutException => throw new OpenAIScalaClientTimeoutException(s"$serviceName.$endPoint timed out: ${e.getMessage}.")
88+
case e: UnknownHostException => throw new OpenAIScalaClientUnknownHostException(s"$serviceName.$endPoint cannot resolve a host name: ${e.getMessage}.")
11089
}
11190
.recover(recoverBlock) // extra recover
11291
}

openai-client/src/main/scala/io/cequence/openaiscala/ConfigImplicits.scala

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,14 +5,12 @@ import com.typesafe.config.Config
55
object ConfigImplicits {
66
implicit class ConfigExt(config: Config) {
77
def optionalString(configPath: String): Option[String] =
8-
if (config.hasPath(configPath)) Some(config.getString(configPath))
9-
else None
8+
if (config.hasPath(configPath)) Some(config.getString(configPath)) else None
109

1110
def optionalInt(configPath: String): Option[Int] =
1211
if (config.hasPath(configPath)) Some(config.getInt(configPath)) else None
1312

1413
def optionalBoolean(configPath: String): Option[Boolean] =
15-
if (config.hasPath(configPath)) Some(config.getBoolean(configPath))
16-
else None
14+
if (config.hasPath(configPath)) Some(config.getBoolean(configPath)) else None
1715
}
1816
}

0 commit comments

Comments (0)