|
1 | 1 | package com.cjcrafter.openai
|
2 | 2 |
|
3 | 3 | import com.cjcrafter.openai.chat.*
|
| 4 | +import com.cjcrafter.openai.chat.tool.ToolChoice |
4 | 5 | import com.cjcrafter.openai.completions.CompletionRequest
|
5 | 6 | import com.cjcrafter.openai.completions.CompletionResponse
|
6 | 7 | import com.cjcrafter.openai.completions.CompletionResponseChunk
|
7 |
| -import com.cjcrafter.openai.exception.OpenAIError |
8 |
| -import com.cjcrafter.openai.gson.ChatChoiceChunkAdapter |
9 |
| -import com.cjcrafter.openai.gson.ChatUserAdapter |
10 |
| -import com.cjcrafter.openai.gson.FinishReasonAdapter |
11 | 8 | import com.google.gson.Gson
|
12 | 9 | import com.google.gson.GsonBuilder
|
13 | 10 | import okhttp3.OkHttpClient
|
14 | 11 |
|
15 | 12 | interface OpenAI {
|
16 | 13 |
|
    /**
     * Calls the [completions](https://platform.openai.com/docs/api-reference/completions)
     * API endpoint. This method is blocking.
     *
     * Completions are considered Legacy, and OpenAI officially recommends that
     * all developers use the **chat completion** endpoint instead. See
     * [createChatCompletion].
     *
     * @param request The request to send to the API
     * @return The response from the API
     */
    fun createCompletion(request: CompletionRequest): CompletionResponse
|
19 | 26 |
|
    /**
     * Calls the [completions](https://platform.openai.com/docs/api-reference/completions)
     * API endpoint and streams each token 1 at a time for a faster response
     * time.
     *
     * This method is **technically** not blocking, but the returned iterable
     * will block until the next token is generated.
     * ```
     * // Each iteration of the loop will block until the next token is streamed
     * for (chunk in openAI.streamCompletion(request)) {
     *     // Do something with the chunk
     * }
     * ```
     *
     * Completions are considered Legacy, and OpenAI officially recommends that
     * all developers use the **chat completion** endpoint instead. See
     * [streamChatCompletion].
     *
     * @param request The request to send to the API
     * @return The response from the API
     */
    fun streamCompletion(request: CompletionRequest): Iterable<CompletionResponseChunk>
|
22 | 49 |
|
    /**
     * Calls the [chat completions](https://platform.openai.com/docs/api-reference/chat)
     * API endpoint. This method is blocking.
     *
     * @param request The request to send to the API
     * @return The response from the API
     */
    fun createChatCompletion(request: ChatRequest): ChatResponse
|
25 | 58 |
|
    /**
     * Calls the [chat completions](https://platform.openai.com/docs/api-reference/chat)
     * API endpoint and streams each token 1 at a time for a faster response.
     *
     * This method is **technically** not blocking, but the returned iterable
     * will block until the next token is generated.
     * ```
     * // Each iteration of the loop will block until the next token is streamed
     * for (chunk in openAI.streamChatCompletion(request)) {
     *     // Do something with the chunk
     * }
     * ```
     *
     * @param request The request to send to the API
     * @return The response from the API
     */
    fun streamChatCompletion(request: ChatRequest): Iterable<ChatResponseChunk>
|
28 | 76 |
|
29 |
| - open class Builder { |
| 77 | + open class Builder internal constructor() { |
30 | 78 | protected var apiKey: String? = null
|
31 | 79 | protected var organization: String? = null
|
32 | 80 | protected var client: OkHttpClient = OkHttpClient()
|
33 | 81 |
|
34 | 82 | fun apiKey(apiKey: String) = apply { this.apiKey = apiKey }
|
35 |
| - |
36 | 83 | fun organization(organization: String?) = apply { this.organization = organization }
|
37 |
| - |
38 | 84 | fun client(client: OkHttpClient) = apply { this.client = client }
|
39 | 85 |
|
40 | 86 | open fun build(): OpenAI {
|
41 |
| - checkNotNull(apiKey) { "apiKey must be defined to use OpenAI" } |
42 |
| - return OpenAIImpl(apiKey!!, organization, client) |
| 87 | + return OpenAIImpl( |
| 88 | + apiKey ?: throw IllegalStateException("apiKey must be defined to use OpenAI"), |
| 89 | + organization, |
| 90 | + client |
| 91 | + ) |
43 | 92 | }
|
44 | 93 | }
|
45 | 94 |
|
46 |
| - class AzureBuilder : Builder() { |
| 95 | + class AzureBuilder internal constructor(): Builder() { |
47 | 96 | private var azureBaseUrl: String? = null
|
48 | 97 | private var apiVersion: String? = null
|
49 | 98 | private var modelName: String? = null
|
50 | 99 |
|
51 | 100 | fun azureBaseUrl(azureBaseUrl: String) = apply { this.azureBaseUrl = azureBaseUrl }
|
52 |
| - |
53 | 101 | fun apiVersion(apiVersion: String) = apply { this.apiVersion = apiVersion }
|
54 |
| - |
55 | 102 | fun modelName(modelName: String) = apply { this.modelName = modelName }
|
56 | 103 |
|
57 | 104 | override fun build(): OpenAI {
|
58 |
| - checkNotNull(apiKey) { "apiKey must be defined to use OpenAI" } |
59 |
| - checkNotNull(azureBaseUrl) { "azureBaseUrl must be defined for azure" } |
60 |
| - checkNotNull(apiVersion) { "apiVersion must be defined for azure" } |
61 |
| - checkNotNull(modelName) { "modelName must be defined for azure" } |
62 |
| - |
63 |
| - return AzureOpenAI(apiKey!!, organization, client, azureBaseUrl!!, apiVersion!!, modelName!!) |
| 105 | + return AzureOpenAI( |
| 106 | + apiKey ?: throw IllegalStateException("apiKey must be defined to use OpenAI"), |
| 107 | + organization, |
| 108 | + client, |
| 109 | + azureBaseUrl ?: throw IllegalStateException("azureBaseUrl must be defined for azure"), |
| 110 | + apiVersion ?: throw IllegalStateException("apiVersion must be defined for azure"), |
| 111 | + modelName ?: throw IllegalStateException("modelName must be defined for azure") |
| 112 | + ) |
64 | 113 | }
|
65 | 114 | }
|
66 | 115 |
|
67 | 116 | companion object {
|
68 | 117 |
|
        /**
         * Instantiates a builder for a default OpenAI instance. For Azure's
         * OpenAI, use [azureBuilder] instead.
         */
        @JvmStatic
        fun builder() = Builder()
|
71 | 124 |
|
        /**
         * Instantiates a builder for an Azure-hosted OpenAI instance. For the
         * default OpenAI, use [builder] instead.
         */
        @JvmStatic
        fun azureBuilder() = AzureBuilder()
|
74 | 130 |
|
        /**
         * Returns a Gson instance with the default OpenAI adapters registered
         * (see [createGsonBuilder]). This can be used to save conversations
         * (and other data) to file.
         */
        @JvmStatic
        fun createGson(): Gson = createGsonBuilder().create()
|
77 | 137 |
|
| 138 | + /** |
| 139 | + * Returns a GsonBuilder instance with the default OpenAI adapters |
| 140 | + * registered. |
| 141 | + */ |
78 | 142 | @JvmStatic
|
79 | 143 | fun createGsonBuilder(): GsonBuilder {
|
80 | 144 | return GsonBuilder()
|
81 |
| - .registerTypeAdapter(ChatUser::class.java, ChatUserAdapter()) |
82 |
| - .registerTypeAdapter(FinishReason::class.java, FinishReasonAdapter()) |
83 |
| - .registerTypeAdapter(ChatChoiceChunk::class.java, ChatChoiceChunkAdapter()) |
| 145 | + .serializeNulls() |
| 146 | + .registerTypeAdapter(ChatChoiceChunk::class.java, ChatChoiceChunk.adapter()) |
| 147 | + .registerTypeAdapter(ToolChoice::class.java, ToolChoice.adapter()) |
84 | 148 | }
|
85 | 149 |
|
| 150 | + /** |
| 151 | + * Extension function to stream a completion using kotlin coroutines. |
| 152 | + */ |
86 | 153 | fun OpenAI.streamCompletion(request: CompletionRequest, consumer: (CompletionResponseChunk) -> Unit) {
|
87 | 154 | for (chunk in streamCompletion(request))
|
88 | 155 | consumer(chunk)
|
89 | 156 | }
|
90 | 157 |
|
        /**
         * Extension function to stream a chat completion, invoking [consumer]
         * once per received chunk.
         *
         * NOTE(review): no kotlin coroutines are involved here — this function
         * is not `suspend`; each loop iteration blocks the calling thread
         * until the next chunk is streamed.
         *
         * @param request The request to send to the API
         * @param consumer Callback invoked for every streamed chunk, in order
         */
        fun OpenAI.streamChatCompletion(request: ChatRequest, consumer: (ChatResponseChunk) -> Unit) {
            for (chunk in streamChatCompletion(request))
                consumer(chunk)
|
|
0 commit comments