
Commit 735416b

Throw server errors as errors
1 parent 4ae0248 commit 735416b

7 files changed: +44 −16 lines

+llms/+utils/errorMessageCatalog.m

Lines changed: 1 addition & 1 deletion
@@ -55,6 +55,6 @@
 catalog("llms:pngExpected") = "Argument must be a PNG image.";
 catalog("llms:warningJsonInstruction") = "When using JSON mode, you must also prompt the model to produce JSON yourself via a system or user message.";
 catalog("llms:invalidOptionsForAzureBackEnd") = "The parameter Model Name is not compatible with Azure.";
-catalog("llms:apiReturnedError") = "OpenAI API Error: {1}";
+catalog("llms:apiReturnedError") = "Server error: ""{1}""";
 catalog("llms:dimensionsMustBeSmallerThan") = "Dimensions must be less than or equal to {1}.";
 end
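
For reference, a minimal sketch of how the reworded catalog entry is meant to be used. It assumes llms.utils.errorMessageCatalog.getMessage fills the "{1}" hole with its trailing argument, as the calls in the chat classes below suggest; the server text here is invented for illustration.

    % Hypothetical example: substitute a server-supplied message into the new entry.
    serverText = "maximum context length exceeded";   % made-up server message
    msg = llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError", serverText);
    % Expected: msg reads   Server error: "maximum context length exceeded"
    % (the doubled "" in the catalog string is MATLAB's escape for a literal quote)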

azureChat.m

Lines changed: 5 additions & 0 deletions
@@ -186,6 +186,11 @@
 PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
 ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...
 APIKey=this.APIKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun);
+
+if isfield(response.Body.Data,"error")
+    err = response.Body.Data.error.message;
+    error("llms:apiReturnedError",llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError",err));
+end
 end
 end

ollamaChat.m

Lines changed: 5 additions & 0 deletions
@@ -149,6 +149,11 @@
 StopSequences=this.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
 ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...
 TimeOut=this.TimeOut, StreamFun=this.StreamFun);
+
+if isfield(response.Body.Data,"error")
+    err = response.Body.Data.error;
+    error("llms:apiReturnedError",llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError",err));
+end
 end
 end

openAIChat.m

Lines changed: 1 addition & 3 deletions
@@ -186,10 +186,8 @@
 
 if isfield(response.Body.Data,"error")
     err = response.Body.Data.error.message;
-    text = llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError",err);
-    message = struct("role","assistant","content",text);
+    error("llms:apiReturnedError",llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError",err));
 end
-
 end
 end
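
With this change, a failed request now surfaces as a regular MATLAB error with identifier "llms:apiReturnedError" instead of being returned as an assistant message. A minimal caller-side sketch, assuming a configured openAIChat and a prompt the server rejects (both placeholders):

    % Hypothetical handling of the new error path in calling code.
    chat = openAIChat;                                   % assumes OPENAI_API_KEY is set
    try
        txt = generate(chat, "a prompt the server rejects");   % placeholder prompt
    catch err
        if err.identifier == "llms:apiReturnedError"
            % The server's own message is embedded in err.message,
            % formatted as: Server error: "...".
            warning("Request failed: %s", err.message);
        else
            rethrow(err);
        end
    end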

tests/tazureChat.m

Lines changed: 9 additions & 0 deletions
@@ -40,6 +40,15 @@ function doGenerate(testCase)
     testCase.verifyGreaterThan(strlength(response),0);
 end
 
+function doReturnErrors(testCase)
+    testCase.assumeTrue(isenv("AZURE_OPENAI_API_KEY"),"end-to-end test requires environment variables AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, and AZURE_OPENAI_DEPLOYMENT.");
+    chat = azureChat(getenv("AZURE_OPENAI_ENDPOINT"), getenv("AZURE_OPENAI_DEPLOYMENT"));
+    % This input is considerably longer than accepted as input for
+    % GPT-3.5 (16385 tokens)
+    wayTooLong = string(repmat('a ',1,20000));
+    testCase.verifyError(@() generate(chat,wayTooLong), "llms:apiReturnedError");
+end
+
 function seedFixesResult(testCase)
     testCase.assumeTrue(isenv("AZURE_OPENAI_API_KEY"),"end-to-end test requires environment variables AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, and AZURE_OPENAI_DEPLOYMENT.");
     chat = azureChat(getenv("AZURE_OPENAI_ENDPOINT"), getenv("AZURE_OPENAI_DEPLOYMENT"));

tests/tollamaChat.m

Lines changed: 9 additions & 0 deletions
@@ -87,6 +87,15 @@ function streamFunc(testCase)
     testCase.verifyGreaterThan(numel(sf("")), 1);
 end
 
+function doReturnErrors(testCase)
+    testCase.assumeFalse( ...
+        any(startsWith(ollamaChat.models,"abcdefghijklmnop")), ...
+        "We want a model name that does not exist on this server");
+    chat = ollamaChat("abcdefghijklmnop");
+    testCase.verifyError(@() generate(chat,"hi!"), "llms:apiReturnedError");
+end
+
+
 function invalidInputsConstructor(testCase, InvalidConstructorInput)
     testCase.verifyError(@() ollamaChat("mistral", InvalidConstructorInput.Input{:}), InvalidConstructorInput.Error);
 end

tests/topenAIChat.m

Lines changed: 14 additions & 12 deletions
@@ -13,14 +13,12 @@
 methods(Test)
     % Test methods
     function generateAcceptsSingleStringAsInput(testCase)
-        chat = openAIChat(APIKey="this-is-not-a-real-key");
-        testCase.verifyWarningFree(@()generate(chat,"This is okay"));
-        chat = openAIChat(APIKey='this-is-not-a-real-key');
+        chat = openAIChat;
         testCase.verifyWarningFree(@()generate(chat,"This is okay"));
     end
 
     function generateAcceptsMessagesAsInput(testCase)
-        chat = openAIChat(APIKey="this-is-not-a-real-key");
+        chat = openAIChat;
         messages = openAIMessages;
         messages = addUserMessage(messages, "This should be okay.");
 
@@ -73,15 +71,17 @@ function errorsWhenPassingToolChoiceWithEmptyTools(testCase)
 
     function settingToolChoiceWithNone(testCase)
         functions = openAIFunction("funName");
-        chat = openAIChat(APIKey="this-is-not-a-real-key",Tools=functions);
+        chat = openAIChat(Tools=functions);
 
         testCase.verifyWarningFree(@()generate(chat,"This is okay","ToolChoice","none"));
     end
 
-    function settingSeedToInteger(testCase)
-        chat = openAIChat(APIKey="this-is-not-a-real-key");
+    function fixedSeedFixesResult(testCase)
+        chat = openAIChat;
 
-        testCase.verifyWarningFree(@()generate(chat,"This is okay", "Seed", 2));
+        result1 = generate(chat,"This is okay", "Seed", 2);
+        result2 = generate(chat,"This is okay", "Seed", 2);
+        testCase.verifyEqual(result1,result2);
     end
 
     function invalidInputsConstructor(testCase, InvalidConstructorInput)
@@ -189,10 +189,12 @@ function invalidGenerateInputforModel(testCase)
         testCase.verifyError(@()generate(chat,inValidMessages), "llms:invalidContentTypeForModel")
     end
 
-    function noStopSequencesNoMaxNumTokens(testCase)
-        chat = openAIChat(APIKey="this-is-not-a-real-key");
-
-        testCase.verifyWarningFree(@()generate(chat,"This is okay"));
+    function doReturnErrors(testCase)
+        chat = openAIChat;
+        % This input is considerably longer than accepted as input for
+        % GPT-3.5 (16385 tokens)
+        wayTooLong = string(repmat('a ',1,20000));
+        testCase.verifyError(@() generate(chat,wayTooLong), "llms:apiReturnedError");
     end
 
     function createOpenAIChatWithStreamFunc(testCase)
