Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
6862d59
fix(ai-proxy): validate model tool support at Router init (fail fast)
Feb 5, 2026
9f06d04
refactor(ai-proxy): move isModelSupportingTools to router.ts
Feb 5, 2026
e4ab3a1
refactor(ai-proxy): extract isModelSupportingTools to dedicated file
Feb 5, 2026
80301dd
refactor(ai-proxy): extract isModelSupportingTools tests to dedicated…
Feb 5, 2026
ae0354d
fix: test
Feb 5, 2026
cc316da
fix(ai-proxy): remove gpt-3.5-turbo from unsupported models blocklist
Feb 5, 2026
acbe27b
chore(ai-proxy): remove model tools support integration test
Feb 5, 2026
f62081f
fix(agent): validate model tool support in addAi() for early failure
Feb 5, 2026
3ff397f
test(agent): reorder addAi tests
Feb 5, 2026
394e5c8
style(agent): fix prettier formatting in test
Feb 5, 2026
ce09754
chore(ai-proxy): remove integration test reference comments
Feb 5, 2026
4d5db25
chore(ai-proxy): remove @internal comment
Feb 5, 2026
a2f1e0e
test(ai-proxy): add model tool support verification test
Feb 5, 2026
fce3f95
test(ai-proxy): add all OpenAI models from types to tests
Feb 5, 2026
deaa050
test(ai-proxy): test all supported models and remove deprecated o1-mini
Feb 5, 2026
3baa377
refactor(ai-proxy): share SUPPORTED_OPENAI_MODELS between tests
Feb 5, 2026
7aa150a
feat(ai-proxy): add gpt-5 family to supported models
Feb 5, 2026
30f653e
chore(ai-proxy): upgrade @langchain/openai to 1.2.5 and fix lint
Feb 5, 2026
f0c58bb
feat(ai-proxy): fetch OpenAI models dynamically from API
Feb 5, 2026
094895e
refactor(ai-proxy): simplify unit tests for isModelSupportingTools
Feb 5, 2026
8fd7416
refactor(agent): move model validation to Router instantiation
Feb 5, 2026
ba88d44
fix(ai-proxy): improve model validation robustness
Feb 6, 2026
b9d8314
fix(ai-proxy): restore -pro pattern for models not supporting chat co…
Feb 6, 2026
2533ae7
refactor(ai-proxy): rename model lists for clarity
Feb 6, 2026
4847e4e
test(ai-proxy): add parallel_tool_calls to model verification test
Feb 6, 2026
a8df84c
fix(ai-proxy): block o-series models that don't support parallel_tool…
Feb 6, 2026
516706d
docs(ai-proxy): add JSDoc to isModelSupportingTools
Feb 6, 2026
f3ceff7
fix(ai-proxy): update variable names in test error message
Feb 6, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 0 additions & 8 deletions packages/agent/src/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ import type {
import type { DataSource, DataSourceFactory } from '@forestadmin/datasource-toolkit';
import type { ForestSchema } from '@forestadmin/forestadmin-client';

import { isModelSupportingTools } from '@forestadmin/ai-proxy';
import { DataSourceCustomizer } from '@forestadmin/datasource-customizer';
import bodyParser from '@koa/bodyparser';
import cors from '@koa/cors';
Expand Down Expand Up @@ -246,13 +245,6 @@ export default class Agent<S extends TSchema = TSchema> extends FrameworkMounter
);
}

if (!isModelSupportingTools(configuration.model)) {
throw new Error(
`Model '${configuration.model}' does not support function calling (tools). ` +
'Please use a compatible model like gpt-4o, gpt-4o-mini, or gpt-4-turbo.',
);
}

this.options.logger(
'Warn',
`AI configuration added with model '${configuration.model}'. ` +
Expand Down
25 changes: 14 additions & 11 deletions packages/agent/test/agent.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -439,19 +439,22 @@ describe('Agent', () => {
).toThrow('addAi can only be called once. Multiple AI configurations are not supported yet.');
});

test('should throw an error when model does not support tools', () => {
test('should throw an error on start when model does not support tools', async () => {
// Use the real makeRoutes to trigger validation in AiProxyRouter
const realMakeRoutes = jest.requireActual('../src/routes').default;
mockMakeRoutes.mockImplementation(realMakeRoutes);

const agent = new Agent(options);

expect(() =>
agent.addAi({
name: 'gpt4-base',
provider: 'openai',
apiKey: 'test-key',
model: 'gpt-4',
}),
).toThrow(
"Model 'gpt-4' does not support function calling (tools). " +
'Please use a compatible model like gpt-4o, gpt-4o-mini, or gpt-4-turbo.',
Review note (author): this sentence was removed because it is no longer relevant.

agent.addAi({
name: 'gpt4-base',
provider: 'openai',
apiKey: 'test-key',
model: 'gpt-4',
});

await expect(agent.start()).rejects.toThrow(
"Model 'gpt-4' does not support tools. Please use a model that supports function calling.",
);
});

Expand Down
2 changes: 1 addition & 1 deletion packages/ai-proxy/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
"@langchain/core": "1.1.15",
"@langchain/langgraph": "^1.1.0",
"@langchain/mcp-adapters": "1.1.1",
"@langchain/openai": "1.2.2",
"@langchain/openai": "1.2.5",
"zod": "^4.3.5"
},
"devDependencies": {
Expand Down
9 changes: 9 additions & 0 deletions packages/ai-proxy/src/errors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,15 @@ export class AIBadRequestError extends AIError {
}
}

/**
 * Raised when an AI configuration references a model that cannot be used with
 * Forest Admin tools (i.e. a model without function-calling support).
 *
 * Maps to a 400 response via the AIBadRequestError base class.
 */
export class AIModelNotSupportedError extends AIBadRequestError {
  constructor(model: string) {
    const message = `Model '${model}' does not support tools. Please use a model that supports function calling.`;

    super(message);
    this.name = 'AIModelNotSupportedError';
  }
}

export class AINotFoundError extends AIError {
constructor(message: string) {
super(message, 404);
Expand Down
1 change: 0 additions & 1 deletion packages/ai-proxy/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ export * from './remote-tools';
export * from './router';
export * from './mcp-client';
export * from './oauth-token-injector';

export * from './errors';

export function validMcpConfigurationOrThrow(mcpConfig: McpConfiguration) {
Expand Down
35 changes: 0 additions & 35 deletions packages/ai-proxy/src/provider-dispatcher.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,41 +21,6 @@ export type {
} from './provider';
export type { DispatchBody } from './schemas/route';

/**
* OpenAI model prefixes that do NOT support function calling (tools).
* Unknown models are allowed.
* @see https://platform.openai.com/docs/guides/function-calling
*/
const OPENAI_MODELS_WITHOUT_TOOLS_SUPPORT = [
'gpt-4',
'gpt-3.5-turbo',
'gpt-3.5',
'text-davinci',
'davinci',
'curie',
'babbage',
'ada',
];

/**
* Exceptions to the unsupported list - these models DO support tools
* even though they start with an unsupported prefix.
*/
const OPENAI_MODELS_EXCEPTIONS = ['gpt-4-turbo', 'gpt-4o', 'gpt-4.1'];

export function isModelSupportingTools(model: string): boolean {
const isException = OPENAI_MODELS_EXCEPTIONS.some(
exception => model === exception || model.startsWith(`${exception}-`),
);
if (isException) return true;

const isKnownUnsupported = OPENAI_MODELS_WITHOUT_TOOLS_SUPPORT.some(
unsupported => model === unsupported || model.startsWith(`${unsupported}-`),
);

return !isKnownUnsupported;
}

export class ProviderDispatcher {
private readonly chatModel: ChatOpenAI | null = null;

Expand Down
14 changes: 13 additions & 1 deletion packages/ai-proxy/src/router.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@ import type { RouteArgs } from './schemas/route';
import type { Logger } from '@forestadmin/datasource-toolkit';
import type { z } from 'zod';

import { AIBadRequestError, ProviderDispatcher } from './index';
import { AIBadRequestError, AIModelNotSupportedError } from './errors';
import McpClient from './mcp-client';
import { ProviderDispatcher } from './provider-dispatcher';
import { RemoteTools } from './remote-tools';
import { routeArgsSchema } from './schemas/route';
import isModelSupportingTools from './supported-models';

export type {
AiQueryArgs,
Expand Down Expand Up @@ -37,6 +39,16 @@ export class Router {
this.aiConfigurations = params?.aiConfigurations ?? [];
this.localToolsApiKeys = params?.localToolsApiKeys;
this.logger = params?.logger;

this.validateConfigurations();
}

private validateConfigurations(): void {
for (const config of this.aiConfigurations) {
if (!isModelSupportingTools(config.model)) {
throw new AIModelNotSupportedError(config.model);
}
}
}

/**
Expand Down
88 changes: 88 additions & 0 deletions packages/ai-proxy/src/supported-models.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
/**
 * NOTE: to build this list, the integration test was run against every OpenAI
 * model; any model that returned an error was added to the blacklist.
 *

* OpenAI model prefixes that do NOT support tool calls via the chat completions API.
*
* Uses prefix matching: model === prefix OR model.startsWith(prefix + '-')
*
* Unknown models are allowed by default.
* If a model fails the integration test, add it here.
*
* @see https://platform.openai.com/docs/guides/function-calling
*/
const UNSUPPORTED_MODEL_PREFIXES = [
  // Legacy completion-era models
  'gpt-4', // base gpt-4 does not honor tool_choice: required
  'text-davinci',
  'davinci',
  'curie',
  'babbage',
  'ada',
  // O-series reasoning models: no parallel_tool_calls support
  'o1',
  'o3',
  'o4',
  // Families that are not chat models at all
  'dall-e',
  'whisper',
  'tts',
  'text-embedding',
  'omni-moderation',
  'chatgpt', // chatgpt-4o-latest, chatgpt-image-latest
  'computer-use', // computer-use-preview
  'gpt-image', // gpt-image-1, gpt-image-1.5
  'gpt-realtime', // gpt-realtime, gpt-realtime-mini
  'gpt-audio', // gpt-audio
  'sora', // sora-2, sora-2-pro
  'codex', // codex-mini-latest
];

/**
 * Substrings (matched anywhere in the model id via `includes`) that mark a
 * model as unable to take tool calls. These are hard blocks: no override
 * applies to them.
 */
const UNSUPPORTED_MODEL_PATTERNS = [
  // Non-chat variants whose marker can appear mid-name
  '-realtime',
  '-audio',
  '-transcribe',
  '-tts',
  '-search',
  '-codex',
  '-instruct',
  // Families only exposed on v1/responses, not v1/chat/completions
  '-pro',
  '-deep-research',
];

/**
 * Model families that DO support tool calls even though they match an entry of
 * UNSUPPORTED_MODEL_PREFIXES; they take precedence over that blocklist.
 */
const SUPPORTED_MODEL_OVERRIDES = ['gpt-4-turbo', 'gpt-4o', 'gpt-4.1'];

/**
 * Checks if a model is compatible with Forest Admin AI.
 *
 * Supported models must handle tool calls and the parallel_tool_calls
 * parameter. Unknown models are allowed by default.
 */
export default function isModelSupportingTools(model: string): boolean {
  // True when the id IS the family, or a dashed variant of it
  // (e.g. 'gpt-4' matches 'gpt-4' and 'gpt-4-0613', but not 'gpt-4.1').
  const belongsToFamily = (family: string): boolean =>
    model === family || model.startsWith(`${family}-`);

  // Substring blocks first: these can never be rescued by an override.
  if (UNSUPPORTED_MODEL_PATTERNS.some(pattern => model.includes(pattern))) {
    return false;
  }

  // Whitelisted families win over the prefix blocklist.
  if (SUPPORTED_MODEL_OVERRIDES.some(belongsToFamily)) {
    return true;
  }

  // Otherwise, allow anything not on the prefix blocklist.
  return !UNSUPPORTED_MODEL_PREFIXES.some(belongsToFamily);
}
113 changes: 113 additions & 0 deletions packages/ai-proxy/test/llm.integration.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,42 @@ import type { Server } from 'http';

// eslint-disable-next-line import/extensions
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import OpenAI from 'openai';
import { z } from 'zod';

import { Router } from '../src';
import runMcpServer from '../src/examples/simple-mcp-server';
import isModelSupportingTools from '../src/supported-models';

const { OPENAI_API_KEY } = process.env;
const describeWithOpenAI = OPENAI_API_KEY ? describe : describe.skip;

/**
* Fetches available models from OpenAI API.
* Returns all models that pass `isModelSupportingTools`.
*
* If a model fails the integration test, update the blacklist in supported-models.ts.
*/
async function fetchChatModelsFromOpenAI(): Promise<string[]> {
const openai = new OpenAI({ apiKey: OPENAI_API_KEY });

let models;
try {
models = await openai.models.list();
} catch (error) {
throw new Error(
`Failed to fetch models from OpenAI API. ` +
`Ensure OPENAI_API_KEY is valid and network is available. ` +
`Original error: ${error}`,
);
}

return models.data
.map(m => m.id)
.filter(id => isModelSupportingTools(id))
.sort();
}

describeWithOpenAI('OpenAI Integration (real API)', () => {
const router = new Router({
aiConfigurations: [
Expand Down Expand Up @@ -688,4 +716,89 @@ describeWithOpenAI('OpenAI Integration (real API)', () => {
}, 10000);
});
});

describe('Model tool support verification', () => {
let modelsToTest: string[];

beforeAll(async () => {
modelsToTest = await fetchChatModelsFromOpenAI();
});

it('should have found chat models from OpenAI API', () => {
expect(modelsToTest.length).toBeGreaterThan(0);
// eslint-disable-next-line no-console
console.log(`Testing ${modelsToTest.length} models:`, modelsToTest);
});

it('all chat models should support tool calls', async () => {
const results: { model: string; success: boolean; error?: string }[] = [];

for (const model of modelsToTest) {
const modelRouter = new Router({
aiConfigurations: [{ name: 'test', provider: 'openai', model, apiKey: OPENAI_API_KEY }],
});

try {
const response = (await modelRouter.route({
route: 'ai-query',
body: {
messages: [{ role: 'user', content: 'What is 2+2?' }],
tools: [
{
type: 'function',
function: {
name: 'calculate',
description: 'Calculate a math expression',
parameters: { type: 'object', properties: { result: { type: 'number' } } },
},
},
],
tool_choice: 'required',
parallel_tool_calls: false,
},
})) as ChatCompletionResponse;

const success =
response.choices[0].finish_reason === 'tool_calls' &&
response.choices[0].message.tool_calls !== undefined;

results.push({ model, success });
} catch (error) {
const errorMessage = String(error);

// Infrastructure errors should fail the test immediately
const isInfrastructureError =
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

avoid false positive to add the model to the blacklist. Not really clean but for a test, i guess it's ok.

errorMessage.includes('rate limit') ||
errorMessage.includes('429') ||
errorMessage.includes('401') ||
errorMessage.includes('Authentication') ||
errorMessage.includes('ECONNREFUSED') ||
errorMessage.includes('ETIMEDOUT') ||
errorMessage.includes('getaddrinfo');

if (isInfrastructureError) {
throw new Error(`Infrastructure error testing model ${model}: ${errorMessage}`);
}

results.push({ model, success: false, error: errorMessage });
}
}

const failures = results.filter(r => !r.success);
if (failures.length > 0) {
const failedModelNames = failures.map(f => f.model).join(', ');
// eslint-disable-next-line no-console
console.error(
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This error message is important because I want to tell to the developper to update the blacklist

`\n❌ ${failures.length} model(s) failed: ${failedModelNames}\n\n` +
`To fix this, add the failing model(s) to the blacklist in:\n` +
` packages/ai-proxy/src/supported-models.ts\n\n` +
`Add to UNSUPPORTED_MODEL_PREFIXES (for prefix match)\n` +
`or UNSUPPORTED_MODEL_PATTERNS (for contains match)\n`,
failures,
);
}

expect(failures).toEqual([]);
}, 300000); // 5 minutes for all models
});
});
Loading
Loading