From c63718d999591145d2d0e64ed608cd1de053fe73 Mon Sep 17 00:00:00 2001 From: Alain Picard Date: Tue, 4 Nov 2025 08:39:29 -0500 Subject: [PATCH] Fixed issue with outdated HF inference libs. Started getting error: [cause]: Error: "https://api-inference.huggingface.co is no longer supported. Please use https://router.huggingface.co/hf-inference instead." 2025-11-03T20:03:28:802 |- at request (file:///usr/local/lib/node_modules/@alpic80/rivet-cli/node_modules/@huggingface/inference/dist/index.js:204:15) --- packages/core/package.json | 2 +- .../huggingface/nodes/ChatHuggingFace.ts | 6 ++-- .../nodes/TextToImageHuggingFace.ts | 33 +++++++++++-------- 3 files changed, 24 insertions(+), 17 deletions(-) diff --git a/packages/core/package.json b/packages/core/package.json index 13b85ceff..e253d9b2a 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -47,7 +47,7 @@ "@gentrace/core": "^2.2.5", "@google-cloud/vertexai": "^0.1.3", "@google/genai": "^0.12.0", - "@huggingface/inference": "^2.6.4", + "@huggingface/inference": "^4.13.0", "assemblyai": "^4.6.0", "autoevals": "^0.0.26", "cron-parser": "^4.9.0", diff --git a/packages/core/src/plugins/huggingface/nodes/ChatHuggingFace.ts b/packages/core/src/plugins/huggingface/nodes/ChatHuggingFace.ts index 62e52adae..a5bb32ed0 100644 --- a/packages/core/src/plugins/huggingface/nodes/ChatHuggingFace.ts +++ b/packages/core/src/plugins/huggingface/nodes/ChatHuggingFace.ts @@ -11,7 +11,7 @@ import { type PluginNodeImpl, type PortId, } from '../../../index.js'; -import { HfInference, HfInferenceEndpoint } from '@huggingface/inference'; +import { InferenceClient } from '@huggingface/inference'; import { getInputOrData } from '../../../utils/inputs.js'; import { coerceType } from '../../../utils/coerceType.js'; import { dedent } from '../../../utils/misc.js'; @@ -273,7 +273,9 @@ export const ChatHuggingFaceNodeImpl: PluginNodeImpl = { const topP = getInputOrData(data, inputData, 'topP', 'number'); const topK = 
getInputOrData(data, inputData, 'topK', 'number'); - const hf = endpoint ? new HfInferenceEndpoint(endpoint, accessToken) : new HfInference(accessToken); + const hf = endpoint + ? new InferenceClient(accessToken, { endpointUrl: endpoint }) + : new InferenceClient(accessToken); const generationStream = hf.textGenerationStream({ inputs: prompt, diff --git a/packages/core/src/plugins/huggingface/nodes/TextToImageHuggingFace.ts b/packages/core/src/plugins/huggingface/nodes/TextToImageHuggingFace.ts index 816741ff8..bb6c4f4ab 100644 --- a/packages/core/src/plugins/huggingface/nodes/TextToImageHuggingFace.ts +++ b/packages/core/src/plugins/huggingface/nodes/TextToImageHuggingFace.ts @@ -11,7 +11,7 @@ import { type PluginNodeImpl, type PortId, } from '../../../index.js'; -import { HfInference, HfInferenceEndpoint } from '@huggingface/inference'; +import { InferenceClient } from '@huggingface/inference'; import { dedent } from 'ts-dedent'; import { pluginNodeDefinition } from '../../../model/NodeDefinition.js'; import { getInputOrData } from '../../../utils/inputs.js'; @@ -218,19 +218,24 @@ export const TextToImageHuggingFaceNodeImpl: PluginNodeImpl