Commit 2e890df

Android Qwen thinking mode prompt support (#10668)
Use different prompt templates depending on whether thinking mode is enabled
1 parent d4c9a30 commit 2e890df

File tree

4 files changed: +42 lines, -20 lines


examples/demo-apps/android/LlamaDemo/app/src/main/java/com/example/executorchllamademo/MainActivity.java

Lines changed: 8 additions & 4 deletions
@@ -692,7 +692,10 @@ private String getConversationHistory() {
         prevPromptID = currentPromptID;
       }
       if (conversation.getIsSent()) {
-        format = format.replace(PromptFormat.USER_PLACEHOLDER, conversation.getText());
+        format =
+            format
+                .replace(PromptFormat.USER_PLACEHOLDER, conversation.getText())
+                .replace(PromptFormat.THINKING_MODE_PLACEHOLDER, "");
       } else {
         format = format.replace(PromptFormat.ASSISTANT_PLACEHOLDER, conversation.getText());
       }
@@ -704,12 +707,12 @@ private String getConversationHistory() {

   private String getTotalFormattedPrompt(String conversationHistory, String rawPrompt) {
     if (conversationHistory.isEmpty()) {
-      return mCurrentSettingsFields.getFormattedSystemAndUserPrompt(rawPrompt);
+      return mCurrentSettingsFields.getFormattedSystemAndUserPrompt(rawPrompt, mThinkMode);
     }

     return mCurrentSettingsFields.getFormattedSystemPrompt()
         + conversationHistory
-        + mCurrentSettingsFields.getFormattedUserPrompt(rawPrompt);
+        + mCurrentSettingsFields.getFormattedUserPrompt(rawPrompt, mThinkMode);
   }

   private void onModelRunStarted() {
@@ -738,7 +741,8 @@ private void onModelRunStopped() {
       if (ModelUtils.getModelCategory(
               mCurrentSettingsFields.getModelType(), mCurrentSettingsFields.getBackendType())
           == ModelUtils.VISION_MODEL) {
-        finalPrompt = mCurrentSettingsFields.getFormattedSystemAndUserPrompt(rawPrompt);
+        finalPrompt =
+            mCurrentSettingsFields.getFormattedSystemAndUserPrompt(rawPrompt, mThinkMode);
       } else {
         finalPrompt = getTotalFormattedPrompt(getConversationHistory(), rawPrompt);
       }

examples/demo-apps/android/LlamaDemo/app/src/main/java/com/example/executorchllamademo/PromptFormat.java

Lines changed: 21 additions & 8 deletions
@@ -13,6 +13,7 @@ public class PromptFormat {
   public static final String SYSTEM_PLACEHOLDER = "{{ system_prompt }}";
   public static final String USER_PLACEHOLDER = "{{ user_prompt }}";
   public static final String ASSISTANT_PLACEHOLDER = "{{ assistant_response }}";
+  public static final String THINKING_MODE_PLACEHOLDER = "{{ thinking_mode }}";
   public static final String DEFAULT_SYSTEM_PROMPT = "Answer the questions in a few sentences";

   public static String getSystemPromptTemplate(ModelType modelType) {
@@ -32,7 +33,7 @@ public static String getSystemPromptTemplate(ModelType modelType) {
     }
   }

-  public static String getUserPromptTemplate(ModelType modelType) {
+  public static String getUserPromptTemplate(ModelType modelType, boolean thinkingMode) {
     switch (modelType) {
       case LLAMA_3:
       case LLAMA_3_1:
@@ -43,15 +44,13 @@ public static String getUserPromptTemplate(ModelType modelType) {
             + "<|eot_id|>"
             + "<|start_header_id|>assistant<|end_header_id|>";

-      case LLAVA_1_5:
       case QWEN_3:
         return "<|im_start|>user\n"
             + USER_PLACEHOLDER
-            + "<|im_end|>\n"
+            + "\n<|im_end|>\n"
             + "<|im_start|>assistant\n"
-            + "<think>\n"
-            + "\n"
-            + "</think>\n\n\n";
+            + THINKING_MODE_PLACEHOLDER;
+      case LLAVA_1_5:
       default:
         return USER_PLACEHOLDER;
     }
@@ -62,9 +61,14 @@ public static String getConversationFormat(ModelType modelType) {
       case LLAMA_3:
       case LLAMA_3_1:
       case LLAMA_3_2:
-        return getUserPromptTemplate(modelType) + "\n" + ASSISTANT_PLACEHOLDER + "<|eot_id|>";
+        return getUserPromptTemplate(modelType, false)
+            + "\n"
+            + ASSISTANT_PLACEHOLDER
+            + "<|eot_id|>";
       case LLAVA_1_5:
         return USER_PLACEHOLDER + " ASSISTANT:";
+      case QWEN_3:
+        return getUserPromptTemplate(modelType, false) + "<|im_end|>\n";
       default:
         return USER_PLACEHOLDER;
     }
@@ -86,13 +90,22 @@ public static String getStopToken(ModelType modelType) {
     }
   }

+  public static String getThinkingModeToken(ModelType modelType, boolean thinkingMode) {
+    switch (modelType) {
+      case QWEN_3:
+        return thinkingMode ? "" : "<think>\n\n</think>\n\n\n";
+      default:
+        return "";
+    }
+  }
+
   public static String getLlavaPresetPrompt() {
     return "A chat between a curious human and an artificial intelligence assistant. The assistant"
         + " gives helpful, detailed, and polite answers to the human's questions. USER: ";
   }

   public static String getFormattedLlamaGuardPrompt(String userPrompt) {
-    return getUserPromptTemplate(ModelType.LLAMA_GUARD_3)
+    return getUserPromptTemplate(ModelType.LLAMA_GUARD_3, false)
         .replace(
             USER_PLACEHOLDER, getLlamaGuardPresetPrompt().replace(USER_PLACEHOLDER, userPrompt));
   }
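
For illustration only (not part of this commit), here is a minimal sketch of how the new Qwen 3 user template resolves with and without thinking mode. It assumes the PromptFormat and ModelType classes from this demo app are on the classpath; the wrapper class and its method name are hypothetical.

package com.example.executorchllamademo;

// Hypothetical example class, not part of this commit. It resolves the Qwen 3 user template
// the same way SettingsFields.getFormattedUserPrompt() does after this change.
public final class QwenThinkingModeExample {

  static String resolveQwenUserPrompt(String question, boolean thinkingMode) {
    return PromptFormat.getUserPromptTemplate(ModelType.QWEN_3, thinkingMode)
        .replace(PromptFormat.USER_PLACEHOLDER, question)
        .replace(
            PromptFormat.THINKING_MODE_PLACEHOLDER,
            PromptFormat.getThinkingModeToken(ModelType.QWEN_3, thinkingMode));
  }

  public static void main(String[] args) {
    // Thinking mode off: the assistant turn is primed with an empty "<think>\n\n</think>"
    // block, so the model answers directly instead of emitting a reasoning trace.
    System.out.println(resolveQwenUserPrompt("What is ExecuTorch?", false));

    // Thinking mode on: THINKING_MODE_PLACEHOLDER resolves to "", leaving the assistant
    // turn open so Qwen 3 can produce its own <think>...</think> section.
    System.out.println(resolveQwenUserPrompt("What is ExecuTorch?", true));
  }
}

Either way the rest of the template is identical, which is why the mode can be toggled per message without editing the stored user prompt.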

examples/demo-apps/android/LlamaDemo/app/src/main/java/com/example/executorchllamademo/SettingsActivity.java

Lines changed: 4 additions & 3 deletions
@@ -272,7 +272,8 @@ public void afterTextChanged(Editable s) {
             new DialogInterface.OnClickListener() {
               public void onClick(DialogInterface dialog, int whichButton) {
                 // Clear the messageAdapter and sharedPreference
-                mUserPromptEditText.setText(PromptFormat.getUserPromptTemplate(mModelType));
+                mUserPromptEditText.setText(
+                    PromptFormat.getUserPromptTemplate(mModelType, false));
               }
             })
         .setNegativeButton(android.R.string.no, null)
@@ -295,7 +296,7 @@ private void showInvalidPromptDialog() {
         .setPositiveButton(
             android.R.string.yes,
             (dialog, whichButton) -> {
-              mUserPromptEditText.setText(PromptFormat.getUserPromptTemplate(mModelType));
+              mUserPromptEditText.setText(PromptFormat.getUserPromptTemplate(mModelType, false));
             })
         .setNegativeButton(android.R.string.no, null)
         .show();
@@ -377,7 +378,7 @@ private void setupModelTypeSelectorDialog() {
         (dialog, item) -> {
           mModelTypeTextView.setText(modelTypes[item]);
           mModelType = ModelType.valueOf(modelTypes[item]);
-          mUserPromptEditText.setText(PromptFormat.getUserPromptTemplate(mModelType));
+          mUserPromptEditText.setText(PromptFormat.getUserPromptTemplate(mModelType, false));
           dialog.dismiss();
         });

examples/demo-apps/android/LlamaDemo/app/src/main/java/com/example/executorchllamademo/SettingsFields.java

Lines changed: 9 additions & 5 deletions
@@ -38,17 +38,21 @@ public String getUserPrompt() {
     return userPrompt;
   }

-  public String getFormattedSystemAndUserPrompt(String prompt) {
-    return getFormattedSystemPrompt() + getFormattedUserPrompt(prompt);
+  public String getFormattedSystemAndUserPrompt(String prompt, boolean thinkingMode) {
+    return getFormattedSystemPrompt() + getFormattedUserPrompt(prompt, thinkingMode);
   }

   public String getFormattedSystemPrompt() {
     return PromptFormat.getSystemPromptTemplate(modelType)
         .replace(PromptFormat.SYSTEM_PLACEHOLDER, systemPrompt);
   }

-  public String getFormattedUserPrompt(String prompt) {
-    return userPrompt.replace(PromptFormat.USER_PLACEHOLDER, prompt);
+  public String getFormattedUserPrompt(String prompt, boolean thinkingMode) {
+    return userPrompt
+        .replace(PromptFormat.USER_PLACEHOLDER, prompt)
+        .replace(
+            PromptFormat.THINKING_MODE_PLACEHOLDER,
+            PromptFormat.getThinkingModeToken(modelType, thinkingMode));
   }

   public boolean getIsClearChatHistory() {
@@ -77,7 +81,7 @@ public SettingsFields() {
     tokenizerFilePath = "";
     temperature = SettingsActivity.TEMPERATURE_MIN_VALUE;
     systemPrompt = "";
-    userPrompt = PromptFormat.getUserPromptTemplate(DEFAULT_MODEL);
+    userPrompt = PromptFormat.getUserPromptTemplate(DEFAULT_MODEL, false);
     isClearChatHistory = false;
     isLoadModel = false;
     modelType = DEFAULT_MODEL;
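
Putting the four files together, the flow from MainActivity is roughly the sketch below. This is a hypothetical helper, not part of the commit; the question strings and the thinkMode value stand in for MainActivity's mThinkMode state.

// Rough sketch of the new call path; assumes the demo app's classes shown above.
static String buildPrompts(SettingsFields settings, boolean thinkMode) {
  // First turn, no history: system prompt + user prompt with the flag threaded through.
  // For Qwen 3 the placeholder becomes "" when thinking is enabled, or an empty
  // <think></think> block when it is not; other model types have no placeholder to fill.
  String firstTurn = settings.getFormattedSystemAndUserPrompt("Explain KV caching.", thinkMode);

  // Later turns: getConversationHistory() blanks the placeholder in past messages, so only
  // the newest user prompt carries the mode-specific token.
  String laterTurn =
      settings.getFormattedSystemPrompt()
          + "<previously formatted conversation history>"
          + settings.getFormattedUserPrompt("And how is the prompt prefilled?", thinkMode);

  return firstTurn + "\n---\n" + laterTurn;
}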
