Skip to content

Commit bf640cd

Browse files
committed
refactor(llm): convert LlmFactory to singleton object
- Changed LlmFactory from a class to a singleton object to simplify instantiation.
- Updated all references to use LlmFactory.create() instead of LlmFactory() or LlmFactory.instance.create().
- Removed the unnecessary project parameter from testLLMConnection().
- Added LLMProvider2 for improved LLM session management and testing.
1 parent 86a3e4e commit bf640cd

File tree

29 files changed

+639
-62
lines changed

29 files changed

+639
-62
lines changed

build.gradle.kts

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -402,6 +402,11 @@ project(":core") {
402402

403403
implementation("io.reactivex.rxjava3:rxjava:3.1.10")
404404

405+
testImplementation("org.jetbrains.kotlinx:kotlinx-coroutines-test:1.10.1")
406+
testImplementation(kotlin("test"))
407+
testImplementation("com.squareup.okhttp3:mockwebserver:4.12.0") {
408+
excludeKotlinDeps()
409+
}
405410
implementation("com.squareup.okhttp3:okhttp:4.12.0") {
406411
excludeKotlinDeps()
407412
}
@@ -683,6 +688,10 @@ project(":exts:devins-lang") {
683688
}
684689
}
685690

691+
tasks.test {
692+
useJUnitPlatform()
693+
}
694+
686695
fun File.isPluginJar(): Boolean {
687696
if (!isFile) return false
688697
if (extension != "jar") return false

core/src/main/kotlin/cc/unitmesh/devti/custom/document/CustomLivingDocTask.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ class CustomLivingDocTask(
2626
logger.info("Prompt: $prompt")
2727

2828
val stream =
29-
LlmFactory().create(project).stream(prompt, "", false)
29+
LlmFactory.create(project).stream(prompt, "", false)
3030

3131
var result = ""
3232

core/src/main/kotlin/cc/unitmesh/devti/custom/tasks/FileGenerateTask.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ class FileGenerateTask(
3434
val systemPrompt = messages.filter { it.role == LlmMsg.ChatRole.System }.joinToString("\n") { it.content }
3535

3636
val stream =
37-
LlmFactory().create(project).stream(requestPrompt, systemPrompt, false)
37+
LlmFactory.create(project).stream(requestPrompt, systemPrompt, false)
3838

3939
var result = ""
4040
runBlocking {

core/src/main/kotlin/cc/unitmesh/devti/diff/DiffStreamHandler.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ class DiffStreamHandler(
123123
val lines = originContent.lines()
124124

125125
isRunning = true
126-
val flow: Flow<String> = LlmFactory.instance.create(project).stream(prompt, "", false)
126+
val flow: Flow<String> = LlmFactory.create(project).stream(prompt, "", false)
127127
var lastLineNo = 0
128128
AutoDevCoroutineScope.scope(project).launch {
129129
val suggestion = StringBuilder()

core/src/main/kotlin/cc/unitmesh/devti/gui/chat/ChatCodingService.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import kotlinx.coroutines.flow.Flow
2121
import kotlinx.coroutines.launch
2222

2323
class ChatCodingService(var actionType: ChatActionType, val project: Project) {
24-
private val llmProvider = LlmFactory().create(project)
24+
private val llmProvider = LlmFactory.create(project)
2525
private val counitProcessor = project.service<CustomAgentChatProcessor>()
2626
private var currentJob: Job? = null
2727

core/src/main/kotlin/cc/unitmesh/devti/inline/AutoDevInlineChatPanel.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ class AutoDevInlineChatPanel(val editor: Editor) : JPanel(GridBagLayout()), Edit
4040
val project = editor.project!!
4141

4242
val prompt = AutoDevInlineChatService.getInstance().prompting(project, input, editor)
43-
val flow: Flow<String>? = LlmFactory.instance.create(project).stream(prompt, "", false)
43+
val flow: Flow<String>? = LlmFactory.create(project).stream(prompt, "", false)
4444

4545
val panelView = SketchToolWindow(project, editor)
4646
panelView.minimumSize = Dimension(800, 40)

core/src/main/kotlin/cc/unitmesh/devti/intentions/action/task/BaseCompletionTask.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ abstract class BaseCompletionTask(private val request: CodeCompletionRequest) :
4141
val prompt = promptText()
4242

4343
val keepHistory = keepHistory() && prompt.length < AutoDevSettingsState.maxTokenLength
44-
val flow: Flow<String> = LlmFactory().create(request.project).stream(prompt, "", keepHistory)
44+
val flow: Flow<String> = LlmFactory.create(request.project).stream(prompt, "", keepHistory)
4545
logger.info("Prompt: $prompt")
4646

4747
DumbAwareAction.create {

core/src/main/kotlin/cc/unitmesh/devti/intentions/action/task/CodeCompletionTask.kt

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,6 @@ import kotlin.jvm.internal.Ref
2727
class CodeCompletionTask(private val request: CodeCompletionRequest) :
2828
Task.Backgroundable(request.project, AutoDevBundle.message("intentions.chat.code.complete.name")) {
2929

30-
private val llmFactory = LlmFactory()
31-
3230
private val writeActionGroupId = "code.complete.intention.write.action"
3331
private val codeMessage = AutoDevBundle.message("intentions.chat.code.complete.name")
3432

@@ -39,7 +37,7 @@ class CodeCompletionTask(private val request: CodeCompletionRequest) :
3937
override fun run(indicator: ProgressIndicator) {
4038
val prompt = promptText()
4139

42-
val flow: Flow<String> = llmFactory.create(request.project).stream(prompt, "", false)
40+
val flow: Flow<String> = LlmFactory.create(request.project).stream(prompt, "", false)
4341
logger.info("Prompt: $prompt")
4442

4543
val editor = request.editor
@@ -92,7 +90,7 @@ class CodeCompletionTask(private val request: CodeCompletionRequest) :
9290
logger.warn("Prompt: $prompt")
9391
AutoDevCoroutineScope.scope(project).launch {
9492
try {
95-
val flow: Flow<String> = llmFactory.createForInlayCodeComplete(project).stream(prompt, "", false)
93+
val flow: Flow<String> = LlmFactory.createForInlayCodeComplete(project).stream(prompt, "", false)
9694
val suggestion = StringBuilder()
9795
flow.collect {
9896
AutoDevStatusService.notifyApplication(AutoDevStatus.InProgress)

core/src/main/kotlin/cc/unitmesh/devti/intentions/action/task/LivingDocumentationTask.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ class LivingDocumentationTask(
3535

3636
logger.info("Prompt: $prompt")
3737

38-
val stream = LlmFactory().create(project).stream(prompt, "", false)
38+
val stream = LlmFactory.create(project).stream(prompt, "", false)
3939

4040
var result = ""
4141

core/src/main/kotlin/cc/unitmesh/devti/intentions/action/task/TestCodeGenTask.kt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ class TestCodeGenTask(val request: TestCodeGenRequest, displayMessage: String) :
122122
indicator.text = AutoDevBundle.message("intentions.request.background.process.title")
123123

124124
val flow: Flow<String> = try {
125-
LlmFactory().create(request.project).stream(prompter, "", false)
125+
LlmFactory.create(request.project).stream(prompter, "", false)
126126
} catch (e: Exception) {
127127
AutoDevStatusService.notifyApplication(AutoDevStatus.Error)
128128
logger.error("Failed to create LLM for: $lang", e)
Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
package cc.unitmesh.devti.llm2
2+
3+
import cc.unitmesh.devti.llm2.MessageStatus.*
4+
import cc.unitmesh.devti.llms.custom.Message
5+
import kotlinx.serialization.encodeToString
6+
import kotlinx.serialization.json.Json
7+
8+
/**
 * Status of a message chunk returned by the backend:
 * [BEGIN] first chunk of a response, [CONTENT] intermediate chunk, [END] final chunk.
 */
enum class MessageStatus {
    BEGIN, CONTENT, END
}
14+
15+
/**
 * A chat session: the session's display name plus its full, ordered message history.
 *
 * @param T payload type of each history item (currently [Message]; kept generic for
 *          possible future multimodal content)
 */
// NOTE(review): `conversionName` looks like a typo for `conversationName` — renaming
// would break callers/serialization, so it is only flagged here.
data class ChatSession<T>(
    val conversionName: String,
    val chatHistory: List<SessionMessageItem<T>>,
    /**
     * Current session status; the next message may only be sent once this
     * is [MessageStatus.END].
     */
    var status: MessageStatus = END,
) {
    companion object {
        /**
         * Convenience `invoke` factory providing the default [Message]-based session.
         * Every seeded history entry is marked as already finished ([END]).
         */
        operator fun invoke(conversionName: String, messages: List<Message> = listOf()): ChatSession<Message> =
            ChatSession<Message>(conversionName, messages.map { SessionMessageItem(it, END) })
    }
}
34+
35+
/**
 * Whether this session may accept another outgoing message, i.e. the previous
 * exchange has fully completed ([END]).
 */
fun <T> ChatSession<T>.canSendNextMessage(): Boolean = status == END
36+
37+
/**
 * Returns a copy of this session with [newMessage] appended to the history.
 *
 * @param newMessage the payload to append
 * @param status status recorded for the appended item (defaults to [CONTENT])
 */
fun <T> ChatSession<T>.appendMessage(newMessage: T, status: MessageStatus = CONTENT): ChatSession<T> =
    copy(chatHistory = chatHistory + SessionMessageItem(newMessage, status))
41+
42+
/**
 * Serializes this session's history into the JSON request body sent to the LLM server.
 *
 * Items with the "Error" role are excluded — they are local client-side state and
 * must not be forwarded to the server.
 * TODO: error items deserve a more principled representation than a role string.
 */
val ChatSession<Message>.httpContent: String
    get() = chatHistory
        .filterNot { it.chatMessage.role == "Error" }
        .joinToString(prefix = """{"messages":[""", postfix = "]}", separator = ",") { item ->
            Json.encodeToString(item.chatMessage)
        }
55+
56+
/**
 * A single item in a chat session's history.
 *
 * [T] may later carry arbitrary (e.g. multimodal) content; today it is [Message].
 *
 * @property chatMessage the message payload
 * @property status whether this message is just starting, mid-stream, or finished
 */
open class SessionMessageItem<T>(
    val chatMessage: T,
    val status: MessageStatus = CONTENT,
) {
    companion object {

        /**
         * Convenience `invoke` factory producing the default text-based implementation.
         */
        operator fun invoke(
            chatMessage: Message,
            status: MessageStatus = CONTENT,
        ): SessionMessageItem<Message> =
            DefaultSessionMessageItem(chatMessage, status)

        // Sentinel "no message" item; already in the END state.
        val Null = SessionMessageItem(Message("", ""), END)
    }
}
81+
82+
/**
 * Default [SessionMessageItem] implementation for plain-text [Message] payloads.
 */
private class DefaultSessionMessageItem(
    chatMessage: Message,
    status: MessageStatus = CONTENT,
) : SessionMessageItem<Message>(chatMessage, status)

0 commit comments

Comments (0)