@@ -42,28 +42,30 @@
 % StreamFun - Function to callback when streaming the
 % result
 %
-% TimeOut - Connection Timeout in seconds (default: 10 secs)
+% TimeOut - Connection Timeout in seconds (default: 120 secs)
 %
 %
 %
 % ollamaChat Functions:
 % ollamaChat - Chat completion API from OpenAI.
 % generate - Generate a response using the ollamaChat instance.
 %
-% ollamaChat Properties: TODO TODO
+% ollamaChat Properties:
 % Model - Model name (as expected by ollama server)
 %
 % Temperature - Temperature of generation.
 %
-% TopProbabilityMass - Top probability mass to consider for generation.
+% TopProbabilityMass - Top probability mass to consider for generation (top-p sampling).
+%
+% TopProbabilityNum - Only consider the k most likely tokens for generation (top-k sampling).
 %
 % StopSequences - Sequences to stop the generation of tokens.
 %
 % SystemPrompt - System prompt.
 %
 % ResponseFormat - Specifies the response format, text or json
 %
-% TimeOut - Connection Timeout in seconds (default: 10 secs)
+% TimeOut - Connection Timeout in seconds (default: 120 secs)
 %

 % Copyright 2024 The MathWorks, Inc.
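For reference, a minimal usage sketch of the options documented in this header. The model name "mistral", the prompt, and the chosen values are placeholders, and it assumes the model name is passed as the first positional argument to the constructor; the name-value options used are the ones declared in the arguments block in the second hunk below.

    % Sketch only: "mistral" and the prompt text are illustrative placeholders.
    chat = ollamaChat("mistral", ...
        TopProbabilityNum=40, ...     % top-k sampling: keep the 40 most likely tokens
        StopSequences="END", ...      % stop generating when this sequence appears
        ResponseFormat="text", ...    % return plain text rather than JSON
        TimeOut=120);                 % connection timeout in seconds (the new default)
    txt = generate(chat, "Why is the sky blue?");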
@@ -83,7 +85,7 @@
     nvp.TopProbabilityNum (1,1) {mustBeReal,mustBePositive} = Inf
     nvp.StopSequences {llms.utils.mustBeValidStop} = {}
     nvp.ResponseFormat (1,1) string {mustBeMember(nvp.ResponseFormat,["text","json"])} = "text"
-    nvp.TimeOut (1,1) {mustBeReal,mustBePositive} = 10
+    nvp.TimeOut (1,1) {mustBeReal,mustBePositive} = 120
     nvp.StreamFun (1,1) {mustBeA(nvp.StreamFun,'function_handle')}
 end

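This arguments block enforces the defaults and types at construction time, including the new 120-second TimeOut. A hedged sketch of wiring up the StreamFun callback documented in the header; printToken is a hypothetical callback name, and the model name and prompt are again placeholders.

    % Sketch only: assumes StreamFun is invoked with each streamed text chunk.
    printToken = @(chunk) fprintf("%s", chunk);
    chat = ollamaChat("mistral", StreamFun=printToken, TimeOut=120);
    txt = generate(chat, "Tell me a short story.");   % chunks print as they arrive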