
Commit 953eedf

fix KotlinTest formatting

1 parent 9472378 commit 953eedf

File tree

1 file changed: +107 -111 lines changed


src/test/kotlin/KotlinTest.kt

Lines changed: 107 additions & 111 deletions
@@ -7,30 +7,26 @@ import com.cjcrafter.openai.chat.ChatResponse
 import com.cjcrafter.openai.chat.ChatResponseChunk
 import com.cjcrafter.openai.completions.CompletionRequest
 import com.cjcrafter.openai.exception.OpenAIError
-import io.github.cdimascio.dotenv.Dotenv
 import io.github.cdimascio.dotenv.dotenv
 import java.util.*
 
-object KotlinTest {
-
-    // Colors for pretty formatting
-    const val RESET = "\u001b[0m"
-    const val BLACK = "\u001b[0;30m"
-    const val RED = "\u001b[0;31m"
-    const val GREEN = "\u001b[0;32m"
-    const val YELLOW = "\u001b[0;33m"
-    const val BLUE = "\u001b[0;34m"
-    const val PURPLE = "\u001b[0;35m"
-    const val CYAN = "\u001b[0;36m"
-    const val WHITE = "\u001b[0;37m"
-
-    @Throws(OpenAIError::class)
-    @JvmStatic
-    fun main(args: Array<String>) {
-        val scanner = Scanner(System.`in`)
-
-        // Print out the menu of options
-        println("""
+// Colors for pretty formatting
+const val RESET = "\u001b[0m"
+const val BLACK = "\u001b[0;30m"
+const val RED = "\u001b[0;31m"
+const val GREEN = "\u001b[0;32m"
+const val YELLOW = "\u001b[0;33m"
+const val BLUE = "\u001b[0;34m"
+const val PURPLE = "\u001b[0;35m"
+const val CYAN = "\u001b[0;36m"
+const val WHITE = "\u001b[0;37m"
+
+@Throws(OpenAIError::class)
+fun main(args: Array<String>) {
+    val scanner = Scanner(System.`in`)
+
+    // Print out the menu of options
+    println("""
             ${GREEN}Please select one of the options below by typing a number.
             1. Completion (create, sync)
             2. Completion (stream, sync)
@@ -40,111 +36,111 @@ object KotlinTest {
             6. Chat (stream, sync)
             7. Chat (create, async)
             8. Chat (stream, async)
-        """.trimIndent())
-
-        when (scanner.nextLine().trim()) {
-            "1" -> doCompletion(stream = false, async = false)
-            "2" -> doCompletion(stream = true, async = false)
-            "3" -> doCompletion(stream = false, async = true)
-            "4" -> doCompletion(stream = true, async = true)
-            "5" -> doChat(stream = false, async = false)
-            "6" -> doChat(stream = true, async = false)
-            "7" -> doChat(stream = false, async = true)
-            "8" -> doChat(stream = true, async = true)
-            else -> System.err.println("Invalid option")
-        }
+        """.trimIndent()
+    )
+
+    when (scanner.nextLine().trim()) {
+        "1" -> doCompletion(stream = false, async = false)
+        "2" -> doCompletion(stream = true, async = false)
+        "3" -> doCompletion(stream = false, async = true)
+        "4" -> doCompletion(stream = true, async = true)
+        "5" -> doChat(stream = false, async = false)
+        "6" -> doChat(stream = true, async = false)
+        "7" -> doChat(stream = false, async = true)
+        "8" -> doChat(stream = true, async = true)
+        else -> System.err.println("Invalid option")
    }
-
-    @Throws(OpenAIError::class)
-    fun doCompletion(stream: Boolean, async: Boolean) {
-        val scan = Scanner(System.`in`)
-        println(YELLOW + "Enter completion: ")
-        val input = scan.nextLine()
-
-        // CompletionRequest contains the data we sent to the OpenAI API. We use
-        // 128 tokens, so we have a bit of a delay before the response (for testing).
-        val request = CompletionRequest.builder()
-            .model("davinci")
-            .prompt(input)
-            .maxTokens(128).build()
-
-        // Loads the API key from the .env file in the root directory.
-        val key = Dotenv.load()["OPENAI_TOKEN"]
-        val openai = OpenAI(key)
-        println(RESET + "Generating Response" + PURPLE)
-        if (stream) {
-            if (async) {
-                openai.streamCompletionAsync(request, { print(it[0].text) })
-                println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
-            } else {
-                openai.streamCompletion(request, { print(it[0].text) })
-            }
+}
+
+@Throws(OpenAIError::class)
+fun doCompletion(stream: Boolean, async: Boolean) {
+    val scan = Scanner(System.`in`)
+    println(YELLOW + "Enter completion: ")
+    val input = scan.nextLine()
+
+    // CompletionRequest contains the data we sent to the OpenAI API. We use
+    // 128 tokens, so we have a bit of a delay before the response (for testing).
+    val request = CompletionRequest.builder()
+        .model("davinci")
+        .prompt(input)
+        .maxTokens(128).build()
+
+    // Loads the API key from the .env file in the root directory.
+    val key = dotenv()["OPENAI_TOKEN"]
+    val openai = OpenAI(key)
+    println(RESET + "Generating Response" + PURPLE)
+    if (stream) {
+        if (async) {
+            openai.streamCompletionAsync(request, { print(it[0].text) })
+            println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
        } else {
-            if (async) {
-                openai.createCompletionAsync(request, { println(it[0].text) })
-                println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
-            } else {
-                println(openai.createCompletion(request)[0].text)
-            }
+            openai.streamCompletion(request, { print(it[0].text) })
+        }
+    } else {
+        if (async) {
+            openai.createCompletionAsync(request, { println(it[0].text) })
+            println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
+        } else {
+            println(openai.createCompletion(request)[0].text)
        }
    }
+}
 
-    @Throws(OpenAIError::class)
-    fun doChat(stream: Boolean, async: Boolean) {
-        val scan = Scanner(System.`in`)
+@Throws(OpenAIError::class)
+fun doChat(stream: Boolean, async: Boolean) {
+    val scan = Scanner(System.`in`)
 
-        // This is the prompt that the bot will refer back to for every message.
-        val prompt = "You are a customer support chat-bot. Write brief summaries of the user's questions so that agents can easily find the answer in a database.".toSystemMessage()
+    // This is the prompt that the bot will refer back to for every message.
+    val prompt = "You are a customer support chat-bot. Write brief summaries of the user's questions so that agents can easily find the answer in a database.".toSystemMessage()
 
-        // Use a mutable (modifiable) list! Always! You should be reusing the
-        // ChatRequest variable, so in order for a conversation to continue
-        // you need to be able to modify the list.
-        val messages: MutableList<ChatMessage> = ArrayList(listOf(prompt))
+    // Use a mutable (modifiable) list! Always! You should be reusing the
+    // ChatRequest variable, so in order for a conversation to continue
+    // you need to be able to modify the list.
+    val messages: MutableList<ChatMessage> = ArrayList(listOf(prompt))
 
-        // ChatRequest is the request we send to OpenAI API. You can modify the
-        // model, temperature, maxTokens, etc. This should be saved, so you can
-        // reuse it for a conversation.
-        val request = ChatRequest(model="gpt-3.5-turbo", messages=messages)
+    // ChatRequest is the request we send to OpenAI API. You can modify the
+    // model, temperature, maxTokens, etc. This should be saved, so you can
+    // reuse it for a conversation.
+    val request = ChatRequest(model = "gpt-3.5-turbo", messages = messages)
 
-        // Loads the API key from the .env file in the root directory.
-        val key = dotenv()["OPENAI_TOKEN"]
-        val openai = OpenAI(key)
+    // Loads the API key from the .env file in the root directory.
+    val key = dotenv()["OPENAI_TOKEN"]
+    val openai = OpenAI(key)
 
-        // The conversation lasts until the user quits the program
-        while (true) {
+    // The conversation lasts until the user quits the program
+    while (true) {
 
-            // Prompt the user to enter a response
-            println("\n${YELLOW}Enter text below:\n")
-            val input = scan.nextLine()
+        // Prompt the user to enter a response
+        println("\n${YELLOW}Enter text below:\n")
+        val input = scan.nextLine()
 
-            // Add the newest user message to the conversation
-            messages.add(input.toUserMessage())
-            println(RESET + "Generating Response" + PURPLE)
-            if (stream) {
-                if (async) {
-                    openai.streamChatCompletionAsync(request, { response: ChatResponseChunk ->
-                        print(response[0].delta)
-                        if (response[0].isFinished()) messages.add(response[0].message)
-                    })
-                    println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
-                } else {
-                    openai.streamChatCompletion(request, { response: ChatResponseChunk ->
-                        print(response[0].delta)
-                        if (response[0].isFinished()) messages.add(response[0].message)
-                    })
-                }
+        // Add the newest user message to the conversation
+        messages.add(input.toUserMessage())
+        println(RESET + "Generating Response" + PURPLE)
+        if (stream) {
+            if (async) {
+                openai.streamChatCompletionAsync(request, { response: ChatResponseChunk ->
+                    print(response[0].delta)
+                    if (response[0].isFinished()) messages.add(response[0].message)
+                })
+                println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
            } else {
-                if (async) {
-                    openai.createChatCompletionAsync(request, { response: ChatResponse ->
-                        println(response[0].message.content)
-                        messages.add(response[0].message)
-                    })
-                    println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
-                } else {
-                    val response = openai.createChatCompletion(request)
+                openai.streamChatCompletion(request, { response: ChatResponseChunk ->
+                    print(response[0].delta)
+                    if (response[0].isFinished()) messages.add(response[0].message)
+                })
+            }
+        } else {
+            if (async) {
+                openai.createChatCompletionAsync(request, { response: ChatResponse ->
                    println(response[0].message.content)
                    messages.add(response[0].message)
-                }
+                })
+                println("$CYAN !!! Code has finished executing. Wait for async code to complete.$PURPLE")
+            } else {
+                val response = openai.createChatCompletion(request)
+                println(response[0].message.content)
+                messages.add(response[0].message)
            }
        }
    }
