@@ -18,8 +18,8 @@ running `npm install`.

## Usage

- To use the module and run gptscripts, you need to first set the OPENAI_API_KEY environment variable to your OpenAI API
- key.
+ To use the module and run gptscripts, you need to first set the `OPENAI_API_KEY` environment variable to your OpenAI API
+ key. You can also set the `GPTSCRIPT_BIN` environment variable to change which `gptscript` binary is used to run gptscripts.

To ensure it is working properly, you can run the following command:

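Both settings are plain environment variables, so they can also be set from Node itself before the client is first used. A minimal sketch, with placeholder values (the key and binary path below are illustrative, not defaults):

```
// Sketch only: populate the environment before any client call is made.
process.env.OPENAI_API_KEY = 'sk-your-key-here';         // required
process.env.GPTSCRIPT_BIN = '/usr/local/bin/gptscript';  // optional override of the gptscript binary

const gptscript = require('@gptscript-ai/gptscript');
```
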
@@ -31,11 +31,10 @@ You will see "Hello, World!" in the output of the command.

## Client

- There are currently a couple "global" options, and the client helps to manage those. A client without any options is
- likely what you want. However, here are the current global options:
-
- - `gptscriptURL`: The URL (including `http(s)://`) of an "SDK server" to use instead of the fork/exec model.
- - `gptscriptBin`: The path to a `gptscript` binary to use instead of the bundled one.
+ The client allows the caller to run gptscript files, tools, and other operations (see below). There are currently no
+ options for this singleton client, so `gptscript.Client.Instance` is all you need. Although the intention is that a
+ single client is all you need for the life of your application, you should call `close()` on the client when you are
+ done.

## Options

@@ -45,7 +44,6 @@ None of the options is required, and the defaults will reduce the number of call
- `disableCache`: Enable or disable caching, default (true)
- `cacheDir`: Specify the cache directory
- `quiet`: No output logging
- - `chdir`: Change current working directory
- `subTool`: Use tool of this name, not the first tool
- `workspace`: Directory to use for the workspace, if specified it will not be deleted on exit

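Taken together, an options object passed to `client.run()` or `client.evaluate()` might look like the following sketch; every value below is a placeholder chosen for illustration, not a default:

```
const opts = {
  disableCache: true,                // bypass the response cache for this run
  cacheDir: '/tmp/gptscript-cache',  // where cached responses are stored
  quiet: true,                       // suppress output logging
  subTool: 'summarize',              // run this named tool instead of the first one
  workspace: './my-workspace',       // workspace directory, kept on disk after the run exits
};
```
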
@@ -61,9 +59,10 @@ Lists all the available built-in tools.
const gptscript = require('@gptscript-ai/gptscript');

async function listTools() {
-  const client = new gptscript.Client();
+  const client = gptscript.Client.Instance;
  const tools = await client.listTools();
  console.log(tools);
+  client.close();
}
```

@@ -79,10 +78,12 @@ const gptscript = require('@gptscript-ai/gptscript');
async function listModels() {
  let models = [];
+  const client = gptscript.Client.Instance;
  try {
-    const client = new gptscript.Client();
    models = await client.listModels();
  } catch (error) {
    console.error(error);
+  } finally {
+    client.close();
  }
}
```
@@ -98,10 +99,12 @@ const gptscript = require('@gptscript-ai/gptscript');

async function version() {
+  const client = gptscript.Client.Instance;
  try {
-    const client = new gptscript.Client();
    console.log(await client.version());
  } catch (error) {
    console.error(error);
+  } finally {
+    client.close();
  }
}
```
@@ -119,11 +122,13 @@ const t = {
};

+const client = gptscript.Client.Instance;
try {
-  const client = new gptscript.Client();
  const run = client.evaluate(t);
  console.log(await run.text());
} catch (error) {
  console.error(error);
+} finally {
+  client.close();
}
```

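The tool definition `t` is elided by this hunk (only its opening and closing lines appear above). As a purely hypothetical minimal example, assuming the tool object takes an `instructions` field for its prompt text, it could look like:

```
// Hypothetical minimal tool definition for client.evaluate();
// the `instructions` field and prompt text are illustrative assumptions.
const t = {
  instructions: 'Say "Hello, World!" and nothing else.',
};
```
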
@@ -141,11 +146,13 @@ const opts = {

async function execFile() {
+  const client = gptscript.Client.Instance;
  try {
-    const client = new gptscript.Client();
    const run = client.run('./hello.gpt', opts);
    console.log(await run.text());
  } catch (e) {
    console.error(e);
+  } finally {
+    client.close();
  }
}
```
@@ -179,7 +186,7 @@ const opts = {

async function streamExecFileWithEvents() {
+  const client = gptscript.Client.Instance;
  try {
-    const client = new gptscript.Client();
    const run = client.run('./test.gpt', opts);

    run.on(gptscript.RunEventType.Event, data => {
@@ -189,6 +196,8 @@ async function streamExecFileWithEvents() {
    await run.text();
  } catch (e) {
    console.error(e);
+  } finally {
+    client.close();
  }
}
```
@@ -218,7 +227,7 @@ const t = {
};

async function streamExecFileWithEvents() {
-  const client = new gptscript.Client();
+  const client = gptscript.Client.Instance;
  let run = client.evaluate(t, opts);
  try {
    // Wait for the initial run to complete.
@@ -236,6 +245,8 @@ async function streamExecFileWithEvents() {
    }
  } catch (e) {
    console.error(e);
+  } finally {
+    client.close();
  }
}