Skip to content

Commit dd0e617

Browse files
committed
server: public: move default parameters to their own file
This commit lets users easily change the default parameter values without having to become familiar with the frontend internals.
1 parent 46e12c4 commit dd0e617

File tree

3 files changed

+49
-35
lines changed

3 files changed

+49
-35
lines changed

examples/server/README.md

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -690,3 +690,12 @@ You can use html formatting if needed.
690690
</body>
691691
</html>
692692
```
693+
694+
### Update default parameters
695+
696+
Edit `examples/server/public/params.js` with the desired default values and pass
697+
the `--path` flag to the server. For example:
698+
699+
```bash
700+
--path /path/to/llama.cpp/examples/server/public/
701+
```

examples/server/public/index.html

Lines changed: 4 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -203,44 +203,13 @@
203203

204204
import { llama } from './completion.js';
205205
import { SchemaConverter } from './json-schema-to-grammar.mjs';
206+
207+
// load default session values and parameters
208+
import { session, params } from './params.js';
209+
206210
let selected_image = false;
207211
var slot_id = -1;
208212

209-
const session = signal({
210-
prompt: "This is a conversation between User and Llama, a friendly chatbot. Llama is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision.",
211-
template: "{{prompt}}\n\n{{history}}\n{{char}}:",
212-
historyTemplate: "{{name}}: {{message}}",
213-
transcript: [],
214-
type: "chat", // "chat" | "completion"
215-
char: "Llama",
216-
user: "User",
217-
image_selected: ''
218-
})
219-
220-
const params = signal({
221-
n_predict: 400,
222-
temperature: 0.7,
223-
repeat_last_n: 256, // 0 = disable penalty, -1 = context size
224-
repeat_penalty: 1.18, // 1.0 = disabled
225-
penalize_nl: false,
226-
top_k: 40, // <= 0 to use vocab size
227-
top_p: 0.95, // 1.0 = disabled
228-
min_p: 0.05, // 0 = disabled
229-
tfs_z: 1.0, // 1.0 = disabled
230-
typical_p: 1.0, // 1.0 = disabled
231-
presence_penalty: 0.0, // 0.0 = disabled
232-
frequency_penalty: 0.0, // 0.0 = disabled
233-
mirostat: 0, // 0/1/2
234-
mirostat_tau: 5, // target entropy
235-
mirostat_eta: 0.1, // learning rate
236-
grammar: '',
237-
n_probs: 0, // no completion_probabilities,
238-
min_keep: 0, // min probs from each sampler,
239-
image_data: [],
240-
cache_prompt: true,
241-
api_key: ''
242-
})
243-
244213
/* START: Support for storing prompt templates and parameters in browsers LocalStorage */
245214

246215
const local_storage_storageKey = "llamacpp_server_local_storage";

examples/server/public/params.js

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
// Default session state and completion parameters for the server web UI.
// Edit the values below to change the defaults served via `--path`.
import { signal } from './index.js';

// UI session state: prompt/history templates, transcript, and participant names.
export const session = signal({
  prompt: "This is a conversation between User and Llama, a friendly chatbot. Llama is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision.",
  template: "{{prompt}}\n\n{{history}}\n{{char}}:",
  historyTemplate: "{{name}}: {{message}}",
  transcript: [],
  type: "chat", // "chat" | "completion"
  char: "Llama",
  user: "User",
  image_selected: ''
})

// Sampling / generation parameters sent with each completion request.
export const params = signal({
  n_predict: 400,
  temperature: 0.7,
  repeat_last_n: 256, // 0 = disable penalty, -1 = context size
  repeat_penalty: 1.18, // 1.0 = disabled
  penalize_nl: false,
  top_k: 40, // <= 0 to use vocab size
  top_p: 0.95, // 1.0 = disabled
  min_p: 0.05, // 0 = disabled
  tfs_z: 1.0, // 1.0 = disabled
  typical_p: 1.0, // 1.0 = disabled
  presence_penalty: 0.0, // 0.0 = disabled
  frequency_penalty: 0.0, // 0.0 = disabled
  mirostat: 0, // 0/1/2
  mirostat_tau: 5, // target entropy
  mirostat_eta: 0.1, // learning rate
  grammar: '',
  n_probs: 0, // no completion_probabilities,
  min_keep: 0, // min probs from each sampler,
  image_data: [],
  cache_prompt: true,
  api_key: ''
})

0 commit comments

Comments
 (0)