Skip to content

Commit 466460d

Browse files
committed
Rename TopProbabilityMass → TopP, TopProbabilityNum → TopK
As per team decision, use the names found everywhere in the literature, “top-p” and “top-k,” just with the spelling adjusted to match MATLAB naming rules.
1 parent d577d36 commit 466460d

File tree

11 files changed

+108
-108
lines changed

11 files changed

+108
-108
lines changed

+llms/+internal/callAzureChatAPI.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@
4747
nvp.ToolChoice
4848
nvp.APIVersion
4949
nvp.Temperature
50-
nvp.TopProbabilityMass
50+
nvp.TopP
5151
nvp.NumCompletions
5252
nvp.StopSequences
5353
nvp.MaxNumTokens
@@ -121,7 +121,7 @@
121121
function dict = mapNVPToParameters()
122122
dict = dictionary();
123123
dict("Temperature") = "temperature";
124-
dict("TopProbabilityMass") = "top_p";
124+
dict("TopP") = "top_p";
125125
dict("NumCompletions") = "n";
126126
dict("StopSequences") = "stop";
127127
dict("MaxNumTokens") = "max_tokens";

+llms/+internal/callOllamaChatAPI.m

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,8 @@
2828
model
2929
messages
3030
nvp.Temperature
31-
nvp.TopProbabilityMass
32-
nvp.TopProbabilityNum
31+
nvp.TopP
32+
nvp.TopK
3333
nvp.TailFreeSamplingZ
3434
nvp.NumCompletions
3535
nvp.StopSequences
@@ -99,8 +99,8 @@
9999
function dict = mapNVPToParameters()
100100
dict = dictionary();
101101
dict("Temperature") = "temperature";
102-
dict("TopProbabilityMass") = "top_p";
103-
dict("TopProbabilityNum") = "top_k";
102+
dict("TopP") = "top_p";
103+
dict("TopK") = "top_k";
104104
dict("TailFreeSamplingZ") = "tfs_z";
105105
dict("NumCompletions") = "n";
106106
dict("StopSequences") = "stop";

+llms/+internal/callOpenAIChatAPI.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@
4545
nvp.ToolChoice
4646
nvp.ModelName
4747
nvp.Temperature
48-
nvp.TopProbabilityMass
48+
nvp.TopP
4949
nvp.NumCompletions
5050
nvp.StopSequences
5151
nvp.MaxNumTokens
@@ -147,7 +147,7 @@
147147
function dict = mapNVPToParameters()
148148
dict = dictionary();
149149
dict("Temperature") = "temperature";
150-
dict("TopProbabilityMass") = "top_p";
150+
dict("TopP") = "top_p";
151151
dict("NumCompletions") = "n";
152152
dict("StopSequences") = "stop";
153153
dict("MaxNumTokens") = "max_tokens";

+llms/+internal/textGenerator.m

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,27 +4,27 @@
44
% Copyright 2023-2024 The MathWorks, Inc.
55

66
properties
7-
%TEMPERATURE Temperature of generation.
7+
%Temperature Temperature of generation.
88
Temperature {llms.utils.mustBeValidTemperature} = 1
99

10-
%TOPPROBABILITYMASS Top probability mass to consider for generation.
11-
TopProbabilityMass {llms.utils.mustBeValidTopP} = 1
10+
%TopP Top probability mass to consider for generation.
11+
TopP {llms.utils.mustBeValidTopP} = 1
1212

13-
%STOPSEQUENCES Sequences to stop the generation of tokens.
13+
%StopSequences Sequences to stop the generation of tokens.
1414
StopSequences {llms.utils.mustBeValidStop} = {}
1515
end
1616

1717
properties (SetAccess=protected)
18-
%TIMEOUT Connection timeout in seconds (default 10 secs)
18+
%TimeOut Connection timeout in seconds (default 10 secs)
1919
TimeOut
2020

21-
%FUNCTIONNAMES Names of the functions that the model can request calls
21+
%FunctionNames Names of the functions that the model can request calls
2222
FunctionNames
2323

24-
%SYSTEMPROMPT System prompt.
24+
%SystemPrompt System prompt.
2525
SystemPrompt = []
2626

27-
%RESPONSEFORMAT Response format, "text" or "json"
27+
%ResponseFormat Response format, "text" or "json"
2828
ResponseFormat
2929
end
3030

azureChat.m

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
% reduce it. Setting Temperature=0 removes
1919
% randomness from the output altogether.
2020
%
21-
% TopProbabilityMass - Top probability mass value for controlling the
21+
% TopP - Top probability mass value for controlling the
2222
% diversity of the output. Default value is 1;
2323
% lower values imply that only the more likely
2424
% words can appear in any particular place.
@@ -61,7 +61,7 @@
6161
% azureChat Properties:
6262
% Temperature - Temperature of generation.
6363
%
64-
% TopProbabilityMass - Top probability mass to consider for generation.
64+
% TopP - Top probability mass to consider for generation.
6565
%
6666
% StopSequences - Sequences to stop the generation of tokens.
6767
%
@@ -98,7 +98,7 @@
9898
nvp.Tools (1,:) {mustBeA(nvp.Tools, "openAIFunction")} = openAIFunction.empty
9999
nvp.APIVersion (1,1) {mustBeAPIVersion} = "2024-02-01"
100100
nvp.Temperature {llms.utils.mustBeValidTemperature} = 1
101-
nvp.TopProbabilityMass {llms.utils.mustBeValidTopP} = 1
101+
nvp.TopP {llms.utils.mustBeValidTopP} = 1
102102
nvp.StopSequences {llms.utils.mustBeValidStop} = {}
103103
nvp.ResponseFormat (1,1) string {mustBeMember(nvp.ResponseFormat,["text","json"])} = "text"
104104
nvp.APIKey {mustBeNonzeroLengthTextScalar}
@@ -135,7 +135,7 @@
135135
this.APIVersion = nvp.APIVersion;
136136
this.ResponseFormat = nvp.ResponseFormat;
137137
this.Temperature = nvp.Temperature;
138-
this.TopProbabilityMass = nvp.TopProbabilityMass;
138+
this.TopP = nvp.TopP;
139139
this.StopSequences = nvp.StopSequences;
140140
this.PresencePenalty = nvp.PresencePenalty;
141141
this.FrequencyPenalty = nvp.FrequencyPenalty;
@@ -192,7 +192,7 @@
192192
[text, message, response] = llms.internal.callAzureChatAPI(this.Endpoint, ...
193193
this.DeploymentID, messagesStruct, this.FunctionsStruct, ...
194194
ToolChoice=toolChoice, APIVersion = this.APIVersion, Temperature=this.Temperature, ...
195-
TopProbabilityMass=this.TopProbabilityMass, NumCompletions=nvp.NumCompletions,...
195+
TopP=this.TopP, NumCompletions=nvp.NumCompletions,...
196196
StopSequences=this.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
197197
PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
198198
ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...

functionSignatures.json

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
{"name":"Tools","kind":"namevalue","type":"openAIFunction"},
99
{"name":"ModelName","kind":"namevalue","type":"choices=llms.openai.models"},
1010
{"name":"Temperature","kind":"namevalue","type":["numeric","scalar",">=0","<=2"]},
11-
{"name":"TopProbabilityMass","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
11+
{"name":"TopP","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
1212
{"name":"StopSequences","kind":"namevalue","type":["string","vector"]},
1313
{"name":"ResponseFormat","kind":"namevalue","type":"choices={'text','json'}"},
1414
{"name":"APIKey","kind":"namevalue","type":["string","scalar"]},
@@ -50,7 +50,7 @@
5050
{"name":"Tools","kind":"namevalue","type":"openAIFunction"},
5151
{"name":"APIVersion","kind":"namevalue","type":"choices=llms.azure.apiVersions"},
5252
{"name":"Temperature","kind":"namevalue","type":["numeric","scalar",">=0","<=2"]},
53-
{"name":"TopProbabilityMass","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
53+
{"name":"TopP","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
5454
{"name":"StopSequences","kind":"namevalue","type":["string","vector"]},
5555
{"name":"ResponseFormat","kind":"namevalue","type":"choices={'text','json'}"},
5656
{"name":"APIKey","kind":"namevalue","type":["string","scalar"]},
@@ -89,8 +89,8 @@
8989
{"name":"model","kind":"positional","type":"choices=ollamaChat.models"},
9090
{"name":"systemPrompt","kind":"ordered","type":["string","scalar"]},
9191
{"name":"Temperature","kind":"namevalue","type":["numeric","scalar",">=0","<=2"]},
92-
{"name":"TopProbabilityMass","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
93-
{"name":"TopProbabilityNum","kind":"namevalue","type":["numeric","scalar","integer",">=1"]},
92+
{"name":"TopP","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
93+
{"name":"TopK","kind":"namevalue","type":["numeric","scalar","integer",">=1"]},
9494
{"name":"StopSequences","kind":"namevalue","type":["string","vector"]},
9595
{"name":"ResponseFormat","kind":"namevalue","type":"choices={'text','json'}"},
9696
{"name":"TailFreeSamplingZ","kind":"namevalue","type":["numeric","scalar","real"]},

ollamaChat.m

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,13 @@
1717
% values reduce it. Setting Temperature=0 removes
1818
% randomness from the output altogether.
1919
%
20-
% TopProbabilityMass - Top probability mass value for controlling the
20+
% TopP - Top probability mass value for controlling the
2121
% diversity of the output. Default value is 1;
2222
% lower values imply that only the more likely
2323
% words can appear in any particular place.
2424
% This is also known as top-p sampling.
2525
%
26-
% TopProbabilityNum - Maximum number of most likely tokens that are
26+
% TopK - Maximum number of most likely tokens that are
2727
% considered for output. Default is Inf, allowing
2828
% all tokens. Smaller values reduce diversity in
2929
% the output.
@@ -34,8 +34,8 @@
3434
% tail-free sampling. Lower values reduce
3535
% diversity, with some authors recommending
3636
% values around 0.95. Tail-free sampling is
37-
% slower than using TopProbabilityMass or
38-
% TopProbabilityNum.
37+
% slower than using TopP or
38+
% TopK.
3939
%
4040
% StopSequences - Vector of strings that when encountered, will
4141
% stop the generation of tokens. Default
@@ -71,7 +71,7 @@
7171

7272
properties
7373
Model (1,1) string
74-
TopProbabilityNum (1,1) {mustBeReal,mustBePositive} = Inf
74+
TopK (1,1) {mustBeReal,mustBePositive} = Inf
7575
TailFreeSamplingZ (1,1) {mustBeReal} = 1
7676
end
7777

@@ -81,8 +81,8 @@
8181
modelName {mustBeTextScalar}
8282
systemPrompt {llms.utils.mustBeTextOrEmpty} = []
8383
nvp.Temperature {llms.utils.mustBeValidTemperature} = 1
84-
nvp.TopProbabilityMass {llms.utils.mustBeValidTopP} = 1
85-
nvp.TopProbabilityNum (1,1) {mustBeReal,mustBePositive} = Inf
84+
nvp.TopP {llms.utils.mustBeValidTopP} = 1
85+
nvp.TopK (1,1) {mustBeReal,mustBePositive} = Inf
8686
nvp.StopSequences {llms.utils.mustBeValidStop} = {}
8787
nvp.ResponseFormat (1,1) string {mustBeMember(nvp.ResponseFormat,["text","json"])} = "text"
8888
nvp.TimeOut (1,1) {mustBeReal,mustBePositive} = 120
@@ -106,8 +106,8 @@
106106
this.Model = modelName;
107107
this.ResponseFormat = nvp.ResponseFormat;
108108
this.Temperature = nvp.Temperature;
109-
this.TopProbabilityMass = nvp.TopProbabilityMass;
110-
this.TopProbabilityNum = nvp.TopProbabilityNum;
109+
this.TopP = nvp.TopP;
110+
this.TopK = nvp.TopK;
111111
this.TailFreeSamplingZ = nvp.TailFreeSamplingZ;
112112
this.StopSequences = nvp.StopSequences;
113113
this.TimeOut = nvp.TimeOut;
@@ -155,7 +155,7 @@
155155
[text, message, response] = llms.internal.callOllamaChatAPI(...
156156
this.Model, messagesStruct, ...
157157
Temperature=this.Temperature, ...
158-
TopProbabilityMass=this.TopProbabilityMass, TopProbabilityNum=this.TopProbabilityNum,...
158+
TopP=this.TopP, TopK=this.TopK,...
159159
TailFreeSamplingZ=this.TailFreeSamplingZ,...
160160
NumCompletions=nvp.NumCompletions,...
161161
StopSequences=this.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...

openAIChat.m

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
% reduce it. Setting Temperature=0 removes
2020
% randomness from the output altogether.
2121
%
22-
% TopProbabilityMass - Top probability mass value for controlling the
22+
% TopP - Top probability mass value for controlling the
2323
% diversity of the output. Default value is 1;
2424
% lower values imply that only the more likely
2525
% words can appear in any particular place.
@@ -58,7 +58,7 @@
5858
%
5959
% Temperature - Temperature of generation.
6060
%
61-
% TopProbabilityMass - Top probability mass to consider for generation.
61+
% TopP - Top probability mass to consider for generation.
6262
%
6363
% StopSequences - Sequences to stop the generation of tokens.
6464
%
@@ -93,7 +93,7 @@
9393
nvp.Tools (1,:) {mustBeA(nvp.Tools, "openAIFunction")} = openAIFunction.empty
9494
nvp.ModelName (1,1) string {mustBeModel} = "gpt-3.5-turbo"
9595
nvp.Temperature {llms.utils.mustBeValidTemperature} = 1
96-
nvp.TopProbabilityMass {llms.utils.mustBeValidTopP} = 1
96+
nvp.TopP {llms.utils.mustBeValidTopP} = 1
9797
nvp.StopSequences {llms.utils.mustBeValidStop} = {}
9898
nvp.ResponseFormat (1,1) string {mustBeMember(nvp.ResponseFormat,["text","json"])} = "text"
9999
nvp.APIKey {mustBeNonzeroLengthTextScalar}
@@ -127,7 +127,7 @@
127127

128128
this.ModelName = nvp.ModelName;
129129
this.Temperature = nvp.Temperature;
130-
this.TopProbabilityMass = nvp.TopProbabilityMass;
130+
this.TopP = nvp.TopP;
131131
this.StopSequences = nvp.StopSequences;
132132

133133
% ResponseFormat is only supported in the latest models only
@@ -190,7 +190,7 @@
190190

191191
[text, message, response] = llms.internal.callOpenAIChatAPI(messagesStruct, this.FunctionsStruct,...
192192
ModelName=this.ModelName, ToolChoice=toolChoice, Temperature=this.Temperature, ...
193-
TopProbabilityMass=this.TopProbabilityMass, NumCompletions=nvp.NumCompletions,...
193+
TopP=this.TopP, NumCompletions=nvp.NumCompletions,...
194194
StopSequences=this.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
195195
PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
196196
ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...

tests/tazureChat.m

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -23,10 +23,10 @@ function constructChatWithAllNVP(testCase)
2323
systemPrompt = "This is a system prompt";
2424
timeout = 3;
2525
chat = azureChat(endpoint, deploymentID, systemPrompt, Tools=functions, ...
26-
Temperature=temperature, TopProbabilityMass=topP, StopSequences=stop, APIKey=apiKey,...
26+
Temperature=temperature, TopP=topP, StopSequences=stop, APIKey=apiKey,...
2727
FrequencyPenalty=frequenceP, PresencePenalty=presenceP, TimeOut=timeout);
2828
testCase.verifyEqual(chat.Temperature, temperature);
29-
testCase.verifyEqual(chat.TopProbabilityMass, topP);
29+
testCase.verifyEqual(chat.TopP, topP);
3030
testCase.verifyEqual(chat.StopSequences, stop);
3131
testCase.verifyEqual(chat.FrequencyPenalty, frequenceP);
3232
testCase.verifyEqual(chat.PresencePenalty, presenceP);
@@ -171,23 +171,23 @@ function keyNotFound(testCase)
171171
"Value", -20, ...
172172
"Error", "MATLAB:expectedNonnegative"), ...
173173
...
174-
"InvalidTopProbabilityMassType", struct( ...
175-
"Property", "TopProbabilityMass", ...
174+
"InvalidTopPType", struct( ...
175+
"Property", "TopP", ...
176176
"Value", "2", ...
177177
"Error", "MATLAB:invalidType"), ...
178178
...
179-
"InvalidTopProbabilityMassSize", struct( ...
180-
"Property", "TopProbabilityMass", ...
179+
"InvalidTopPSize", struct( ...
180+
"Property", "TopP", ...
181181
"Value", [1 1 1], ...
182182
"Error", "MATLAB:expectedScalar"), ...
183183
...
184-
"TopProbabilityMassTooLarge", struct( ...
185-
"Property", "TopProbabilityMass", ...
184+
"TopPTooLarge", struct( ...
185+
"Property", "TopP", ...
186186
"Value", 20, ...
187187
"Error", "MATLAB:notLessEqual"), ...
188188
...
189-
"TopProbabilityMassTooSmall", struct( ...
190-
"Property", "TopProbabilityMass", ...
189+
"TopPTooSmall", struct( ...
190+
"Property", "TopP", ...
191191
"Value", -20, ...
192192
"Error", "MATLAB:expectedNonnegative"), ...
193193
...
@@ -323,20 +323,20 @@ function keyNotFound(testCase)
323323
"Input",{{ "Temperature" -20 }},...
324324
"Error","MATLAB:expectedNonnegative"),...
325325
...
326-
"InvalidTopProbabilityMassType",struct( ...
327-
"Input",{{ "TopProbabilityMass" "2" }},...
326+
"InvalidTopPType",struct( ...
327+
"Input",{{ "TopP" "2" }},...
328328
"Error","MATLAB:invalidType"),...
329329
...
330-
"InvalidTopProbabilityMassSize",struct( ...
331-
"Input",{{ "TopProbabilityMass" [1 1 1] }},...
330+
"InvalidTopPSize",struct( ...
331+
"Input",{{ "TopP" [1 1 1] }},...
332332
"Error","MATLAB:expectedScalar"),...
333333
...
334-
"TopProbabilityMassTooLarge",struct( ...
335-
"Input",{{ "TopProbabilityMass" 20 }},...
334+
"TopPTooLarge",struct( ...
335+
"Input",{{ "TopP" 20 }},...
336336
"Error","MATLAB:notLessEqual"),...
337337
...
338-
"TopProbabilityMassTooSmall",struct( ...
339-
"Input",{{ "TopProbabilityMass" -20 }},...
338+
"TopPTooSmall",struct( ...
339+
"Input",{{ "TopP" -20 }},...
340340
"Error","MATLAB:expectedNonnegative"),...
341341
...
342342
"WrongTypeStopSequences",struct( ...

0 commit comments

Comments
 (0)