Commit f2795a8

Fix client in VLM JS inference snippet (#1013)
Small typo. In JS, the client library is `huggingface.js`, not `huggingface_hub`.
1 parent 8eea49e


packages/tasks/src/snippets/js.ts

Lines changed: 2 additions & 2 deletions
@@ -57,7 +57,7 @@ export const snippetTextGeneration = (
 	if (streaming) {
 		return [
 			{
-				client: "huggingface_hub",
+				client: "huggingface.js",
 				content: `import { HfInference } from "@huggingface/inference"
 
 const client = new HfInference("${accessToken || `{API_TOKEN}`}")
@@ -108,7 +108,7 @@ for await (const chunk of stream) {
 	} else {
 		return [
 			{
-				client: "huggingface_hub",
+				client: "huggingface.js",
 				content: `import { HfInference } from '@huggingface/inference'
 
 const client = new HfInference("${accessToken || `{API_TOKEN}`}")
