Skip to content

Commit 1a5ebb9

Browse files
committed
Fix capitalization: APIKey, by MathWorks naming standards.
This is *not* a breaking change, because we have case-insensitive (and partial) matching for these names.
1 parent 8d50886 commit 1a5ebb9

15 files changed

+145
-145
lines changed

+llms/+internal/callAzureChatAPI.m

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
% apiKey = "your-api-key-here"
3636
%
3737
% % Send a request
38-
% [text, message] = llms.internal.callAzureChatAPI(messages, functions, ApiKey=apiKey)
38+
% [text, message] = llms.internal.callAzureChatAPI(messages, functions, APIKey=apiKey)
3939

4040
% Copyright 2023-2024 The MathWorks, Inc.
4141

@@ -55,7 +55,7 @@
5555
nvp.FrequencyPenalty
5656
nvp.ResponseFormat
5757
nvp.Seed
58-
nvp.ApiKey
58+
nvp.APIKey
5959
nvp.TimeOut
6060
nvp.StreamFun
6161
end
@@ -64,7 +64,7 @@
6464

6565
parameters = buildParametersCall(messages, functions, nvp);
6666

67-
[response, streamedText] = llms.internal.sendRequest(parameters,nvp.ApiKey, URL, nvp.TimeOut, nvp.StreamFun);
67+
[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
6868

6969
% If call errors, "choices" will not be part of response.Body.Data, instead
7070
% we get response.Body.Data.error

+llms/+internal/callOpenAIChatAPI.m

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
% apiKey = "your-api-key-here"
3636
%
3737
% % Send a request
38-
% [text, message] = llms.internal.callOpenAIChatAPI(messages, functions, ApiKey=apiKey)
38+
% [text, message] = llms.internal.callOpenAIChatAPI(messages, functions, APIKey=apiKey)
3939

4040
% Copyright 2023-2024 The MathWorks, Inc.
4141

@@ -53,7 +53,7 @@
5353
nvp.FrequencyPenalty
5454
nvp.ResponseFormat
5555
nvp.Seed
56-
nvp.ApiKey
56+
nvp.APIKey
5757
nvp.TimeOut
5858
nvp.StreamFun
5959
end
@@ -62,7 +62,7 @@
6262

6363
parameters = buildParametersCall(messages, functions, nvp);
6464

65-
[response, streamedText] = llms.internal.sendRequest(parameters,nvp.ApiKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
65+
[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
6666

6767
% If call errors, "choices" will not be part of response.Body.Data, instead
6868
% we get response.Body.Data.error

+llms/+internal/getApiKeyFromNvpOrEnv.m

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,15 @@
44
%getApiKeyFromNvpOrEnv Retrieves an API key from a Name-Value Pair struct or environment variable.
55
%
66
% This function takes a struct nvp containing name-value pairs and checks if
7-
% it contains a field called "ApiKey". If the field is not found, the
7+
% it contains a field called "APIKey". If the field is not found, the
88
% function attempts to retrieve the API key from an environment variable
99
% whose name is given as the second argument. If both methods fail, the
1010
% function throws an error.
1111

1212
% Copyright 2023-2024 The MathWorks, Inc.
1313

14-
if isfield(nvp, "ApiKey")
15-
key = nvp.ApiKey;
14+
if isfield(nvp, "APIKey")
15+
key = nvp.APIKey;
1616
else
1717
if isenv(envVarName)
1818
key = getenv(envVarName);

+llms/+internal/textGenerator.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
properties (Access=protected)
3232
Tools
3333
FunctionsStruct
34-
ApiKey
34+
APIKey
3535
StreamFun
3636
end
3737
end

+llms/+utils/errorMessageCatalog.m

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@
4343
catalog("llms:assistantMustHaveTextNameAndArguments") = "Fields 'name' and 'arguments' must be text with one or more characters.";
4444
catalog("llms:mustBeValidIndex") = "Value is larger than the number of elements in Messages ({1}).";
4545
catalog("llms:stopSequencesMustHaveMax4Elements") = "Number of elements must not be larger than 4.";
46-
catalog("llms:keyMustBeSpecified") = "Unable to find API key. Either set environment variable {1} or specify name-value argument ""ApiKey"".";
46+
catalog("llms:keyMustBeSpecified") = "Unable to find API key. Either set environment variable {1} or specify name-value argument ""APIKey"".";
4747
catalog("llms:mustHaveMessages") = "Value must contain at least one message in Messages.";
4848
catalog("llms:mustSetFunctionsForCall") = "When no functions are defined, ToolChoice must not be specified.";
4949
catalog("llms:mustBeMessagesOrTxt") = "Messages must be text with one or more characters or an openAIMessages objects.";

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -340,7 +340,7 @@ messages = addUserMessageWithImages(messages,"What is in the image?",image_path)
340340

341341
## Establishing a connection to Chat Completions API using Azure
342342

343-
If you would like to connect MATLAB to Chat Completions API via Azure instead of directly with OpenAI, you will have to create an `azureChat` object. See [the Azure documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/chatgpt-quickstart) for details on the setup required and where to find your key, endpoint, and deployment name. As explained above, the key should be in the environment variable `AZURE_OPENAI_API_KEY`, or provided as `ApiKey=…` in the `azureChat` call below.
343+
If you would like to connect MATLAB to Chat Completions API via Azure instead of directly with OpenAI, you will have to create an `azureChat` object. See [the Azure documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/chatgpt-quickstart) for details on the setup required and where to find your key, endpoint, and deployment name. As explained above, the key should be in the environment variable `AZURE_OPENAI_API_KEY`, or provided as `APIKey=…` in the `azureChat` call below.
344344

345345
In order to create the chat assistant, you must specify your Azure OpenAI Resource and the LLM you want to use:
346346
```matlab

azureChat.m

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
% ResponseFormat - The format of response the model returns.
3030
% "text" (default) | "json"
3131
%
32-
% ApiKey - The API key for accessing the OpenAI Chat API.
32+
% APIKey - The API key for accessing the OpenAI Chat API.
3333
%
3434
% PresencePenalty - Penalty value for using a token in the response
3535
% that has already been used. Default value is 0.
@@ -91,7 +91,7 @@
9191
nvp.TopProbabilityMass {llms.utils.mustBeValidTopP} = 1
9292
nvp.StopSequences {llms.utils.mustBeValidStop} = {}
9393
nvp.ResponseFormat (1,1) string {mustBeMember(nvp.ResponseFormat,["text","json"])} = "text"
94-
nvp.ApiKey {mustBeNonzeroLengthTextScalar}
94+
nvp.APIKey {mustBeNonzeroLengthTextScalar}
9595
nvp.PresencePenalty {llms.utils.mustBeValidPenalty} = 0
9696
nvp.FrequencyPenalty {llms.utils.mustBeValidPenalty} = 0
9797
nvp.TimeOut (1,1) {mustBeReal,mustBePositive} = 10
@@ -129,7 +129,7 @@
129129
this.StopSequences = nvp.StopSequences;
130130
this.PresencePenalty = nvp.PresencePenalty;
131131
this.FrequencyPenalty = nvp.FrequencyPenalty;
132-
this.ApiKey = llms.internal.getApiKeyFromNvpOrEnv(nvp,"AZURE_OPENAI_API_KEY");
132+
this.APIKey = llms.internal.getApiKeyFromNvpOrEnv(nvp,"AZURE_OPENAI_API_KEY");
133133
this.TimeOut = nvp.TimeOut;
134134
end
135135

@@ -185,7 +185,7 @@
185185
StopSequences=this.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
186186
PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
187187
ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...
188-
ApiKey=this.ApiKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun);
188+
APIKey=this.APIKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun);
189189
end
190190
end
191191

extractOpenAIEmbeddings.m

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
%
1010
% 'ModelName' - The ID of the model to use.
1111
%
12-
% 'ApiKey' - OpenAI API token. It can also be specified by
12+
% 'APIKey' - OpenAI API token. It can also be specified by
1313
% setting the environment variable OPENAI_API_KEY
1414
%
1515
% 'TimeOut' - Connection Timeout in seconds (default: 10 secs)
@@ -28,7 +28,7 @@
2828
"text-embedding-3-large", "text-embedding-3-small"])} = "text-embedding-ada-002"
2929
nvp.TimeOut (1,1) {mustBeReal,mustBePositive} = 10
3030
nvp.Dimensions (1,1) {mustBeInteger,mustBePositive}
31-
nvp.ApiKey {llms.utils.mustBeNonzeroLengthTextScalar}
31+
nvp.APIKey {llms.utils.mustBeNonzeroLengthTextScalar}
3232
end
3333

3434
END_POINT = "https://api.openai.com/v1/embeddings";

functionSignatures.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
{"name":"TopProbabilityMass","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
1212
{"name":"StopSequences","kind":"namevalue","type":["string","vector"]},
1313
{"name":"ResponseFormat","kind":"namevalue","type":"choices={'text','json'}"},
14-
{"name":"ApiKey","kind":"namevalue","type":["string","scalar"]},
14+
{"name":"APIKey","kind":"namevalue","type":["string","scalar"]},
1515
{"name":"PresencePenalty","kind":"namevalue","type":["numeric","scalar","<=2",">=-2"]},
1616
{"name":"FrequencyPenalty","kind":"namevalue","type":["numeric","scalar","<=2",">=-2"]},
1717
{"name":"TimeOut","kind":"namevalue","type":["numeric","scalar","real","positive"]},
@@ -53,7 +53,7 @@
5353
{"name":"TopProbabilityMass","kind":"namevalue","type":["numeric","scalar",">=0","<=1"]},
5454
{"name":"StopSequences","kind":"namevalue","type":["string","vector"]},
5555
{"name":"ResponseFormat","kind":"namevalue","type":"choices={'text','json'}"},
56-
{"name":"ApiKey","kind":"namevalue","type":["string","scalar"]},
56+
{"name":"APIKey","kind":"namevalue","type":["string","scalar"]},
5757
{"name":"PresencePenalty","kind":"namevalue","type":["numeric","scalar","<=2",">=-2"]},
5858
{"name":"FrequencyPenalty","kind":"namevalue","type":["numeric","scalar","<=2",">=-2"]},
5959
{"name":"TimeOut","kind":"namevalue","type":["numeric","scalar","real","positive"]},

openAIChat.m

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
% CHAT = openAIChat(systemPrompt) creates an openAIChat object with the
55
% specified system prompt.
66
%
7-
% CHAT = openAIChat(systemPrompt,ApiKey=key) uses the specified API key
7+
% CHAT = openAIChat(systemPrompt,APIKey=key) uses the specified API key
88
%
99
% CHAT = openAIChat(systemPrompt,Name=Value) specifies additional options
1010
% using one or more name-value arguments:
@@ -68,14 +68,14 @@
6868

6969
% Copyright 2023-2024 The MathWorks, Inc.
7070

71-
properties(SetAccess=private)
71+
properties(SetAccess=private)
7272
%MODELNAME Model name.
7373
ModelName
7474
end
7575

7676

7777
methods
78-
function this = openAIChat(systemPrompt, nvp)
78+
function this = openAIChat(systemPrompt, nvp)
7979
arguments
8080
systemPrompt {llms.utils.mustBeTextOrEmpty} = []
8181
nvp.Tools (1,:) {mustBeA(nvp.Tools, "openAIFunction")} = openAIFunction.empty
@@ -84,7 +84,7 @@
8484
nvp.TopProbabilityMass {llms.utils.mustBeValidTopP} = 1
8585
nvp.StopSequences {llms.utils.mustBeValidStop} = {}
8686
nvp.ResponseFormat (1,1) string {mustBeMember(nvp.ResponseFormat,["text","json"])} = "text"
87-
nvp.ApiKey {mustBeNonzeroLengthTextScalar}
87+
nvp.APIKey {mustBeNonzeroLengthTextScalar}
8888
nvp.PresencePenalty {llms.utils.mustBeValidPenalty} = 0
8989
nvp.FrequencyPenalty {llms.utils.mustBeValidPenalty} = 0
9090
nvp.TimeOut (1,1) {mustBeReal,mustBePositive} = 10
@@ -105,7 +105,7 @@
105105
this.Tools = nvp.Tools;
106106
[this.FunctionsStruct, this.FunctionNames] = functionAsStruct(nvp.Tools);
107107
end
108-
108+
109109
if ~isempty(systemPrompt)
110110
systemPrompt = string(systemPrompt);
111111
if systemPrompt ~= ""
@@ -124,7 +124,7 @@
124124

125125
this.PresencePenalty = nvp.PresencePenalty;
126126
this.FrequencyPenalty = nvp.FrequencyPenalty;
127-
this.ApiKey = llms.internal.getApiKeyFromNvpOrEnv(nvp,"OPENAI_API_KEY");
127+
this.APIKey = llms.internal.getApiKeyFromNvpOrEnv(nvp,"OPENAI_API_KEY");
128128
this.TimeOut = nvp.TimeOut;
129129
end
130130

@@ -143,13 +143,13 @@
143143
% MaxNumTokens - Maximum number of tokens in the generated response.
144144
% Default value is inf.
145145
%
146-
% ToolChoice - Function to execute. 'none', 'auto',
146+
% ToolChoice - Function to execute. 'none', 'auto',
147147
% or specify the function to call.
148148
%
149149
% Seed - An integer value to use to obtain
150150
% reproducible responses
151-
%
152-
% Currently, GPT-4 Turbo with vision does not support the message.name
151+
%
152+
% Currently, GPT-4 Turbo with vision does not support the message.name
153153
% parameter, functions/tools, response_format parameter, and stop
154154
% sequences. It also has a low MaxNumTokens default, which can be overridden.
155155

@@ -165,7 +165,7 @@
165165
toolChoice = convertToolChoice(this, nvp.ToolChoice);
166166

167167
if isstring(messages) && isscalar(messages)
168-
messagesStruct = {struct("role", "user", "content", messages)};
168+
messagesStruct = {struct("role", "user", "content", messages)};
169169
else
170170
messagesStruct = messages.Messages;
171171
end
@@ -175,14 +175,14 @@
175175
if ~isempty(this.SystemPrompt)
176176
messagesStruct = horzcat(this.SystemPrompt, messagesStruct);
177177
end
178-
178+
179179
[text, message, response] = llms.internal.callOpenAIChatAPI(messagesStruct, this.FunctionsStruct,...
180180
ModelName=this.ModelName, ToolChoice=toolChoice, Temperature=this.Temperature, ...
181181
TopProbabilityMass=this.TopProbabilityMass, NumCompletions=nvp.NumCompletions,...
182182
StopSequences=this.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
183183
PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
184184
ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...
185-
ApiKey=this.ApiKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun);
185+
APIKey=this.APIKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun);
186186

187187
if isfield(response.Body.Data,"error")
188188
err = response.Body.Data.error.message;
@@ -208,7 +208,7 @@ function mustBeValidFunctionCall(this, functionCall)
208208
% if toolChoice is empty
209209
if isempty(toolChoice)
210210
% if Tools is not empty, the default is 'auto'.
211-
if ~isempty(this.Tools)
211+
if ~isempty(this.Tools)
212212
toolChoice = "auto";
213213
end
214214
elseif ~ismember(toolChoice,["auto","none"])
@@ -240,11 +240,11 @@ function mustBeNonzeroLengthTextScalar(content)
240240

241241
function mustBeValidMsgs(value)
242242
if isa(value, "openAIMessages")
243-
if numel(value.Messages) == 0
243+
if numel(value.Messages) == 0
244244
error("llms:mustHaveMessages", llms.utils.errorMessageCatalog.getMessage("llms:mustHaveMessages"));
245245
end
246246
else
247-
try
247+
try
248248
llms.utils.mustBeNonzeroLengthTextScalar(value);
249249
catch ME
250250
error("llms:mustBeMessagesOrTxt", llms.utils.errorMessageCatalog.getMessage("llms:mustBeMessagesOrTxt"));

0 commit comments

Comments
 (0)