Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions docs/assets/api/schemas.json
Original file line number Diff line number Diff line change
Expand Up @@ -2743,6 +2743,10 @@
"const": "GEMINI",
"type": "string"
},
{
"const": "VERTEX_AI",
"type": "string"
},
{
"const": "OPENAI",
"type": "string"
Expand All @@ -2769,6 +2773,10 @@
"const": "GEMINI",
"type": "string"
},
{
"const": "VERTEX_AI",
"type": "string"
},
{
"const": "OPENAI",
"type": "string"
Expand Down
6 changes: 3 additions & 3 deletions docs/researchers/add-agent-participant.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@ same way as a human would.

## Preparing an Experiment for Agent Participants

You can add agent participants to any experiment, as long as you have a Google
API key configured. (Right now, only Gemini models can be selected for agent
participants.) No other experiment-level config is necessary. However, we
You can add agent participants to any experiment, as long as you have
credentials configured for your chosen provider (an API key for Gemini,
Vertex AI, OpenAI, or Claude, or an endpoint URL for Ollama). No other
experiment-level config is necessary. However, we
recommend setting up your experiments with an eye for how agent participants
will see each stage.

Expand Down
3 changes: 2 additions & 1 deletion docs/researchers/getting-started.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,8 @@ Once you're logged in, you should be able to see a home page with
both your experiments and any shared public experiments.

> Tip: If you plan to set up LLM mediators, add relevant API keys
(e.g., [Gemini API key](https://ai.google.dev/gemini-api/docs/api-key))
(e.g., a [Gemini API key](https://ai.google.dev/gemini-api/docs/api-key),
or credentials for Vertex AI, OpenAI, Claude, or Ollama)
on the Settings page now. Your key must be present in order to effectively
run LLM agents during experiments. We store it in a Firebase document
that only you (and the backend function making the LLM calls
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,11 @@ export class AgentPersonaEditorComponent extends MobxLitElement {
'Gemini',
ApiKeyType.GEMINI_API_KEY,
)}
${this.renderApiTypeButton(
agentConfig,
'Vertex AI',
ApiKeyType.VERTEX_AI_API_KEY,
)}
${this.renderApiTypeButton(
agentConfig,
'OpenAI or compatible API',
Expand Down
63 changes: 63 additions & 0 deletions frontend/src/components/experimenter/experimenter_data_editor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,8 @@ export class ExperimenterDataEditor extends MobxLitElement {
</div>
${this.renderGeminiKey()}
<div class="divider"></div>
${this.renderVertexAISettings()}
<div class="divider"></div>
${this.renderOpenAISettings()}
<div class="divider"></div>
${this.renderClaudeSettings()}
Expand Down Expand Up @@ -158,6 +160,67 @@ export class ExperimenterDataEditor extends MobxLitElement {
`;
}

// ============ Vertex AI ============
/**
 * Renders the Vertex AI settings section of the experimenter data editor.
 * Offers two auth paths: an express-mode API key, or pasted service-account
 * JSON with an optional location override.
 *
 * NOTE(review): the `field` union includes 'project', but no project input
 * is rendered below — presumably the backend reads project_id from the
 * service-account JSON; confirm before adding a project field here.
 */
private renderVertexAISettings() {
  // Persists a single edited field into the stored Vertex AI config.
  const updateVertexAISettings = (
    e: InputEvent,
    field: 'apiKey' | 'project' | 'location' | 'serviceAccountJson',
  ) => {
    const oldData = this.authService.experimenterData;
    if (!oldData) return;

    const value = (e.target as HTMLInputElement).value;
    // Any edit invalidates a previously displayed key-check result.
    this.setApiKeyResult(ApiKeyType.VERTEX_AI_API_KEY, {
      status: CheckApiKeyStatus.NONE,
    });

    // Merge just the changed field over the existing Vertex AI config,
    // preserving all other API keys untouched.
    const newData = updateExperimenterData(oldData, {
      apiKeys: {
        ...oldData.apiKeys,
        vertexAIConfig: {
          ...(oldData.apiKeys?.vertexAIConfig ?? {}),
          [field]: value,
        },
      },
    });

    this.authService.writeExperimenterData(newData);
  };

  const data = this.authService.experimenterData;
  const config = data?.apiKeys.vertexAIConfig;
  return html`
    <div class="section">
      <h3>Vertex AI API settings</h3>
      <p>
        Use either an API key (express mode) or service account credentials.
      </p>
      <md-filled-text-field
        label="API key (express mode)"
        placeholder="Add Vertex AI API key"
        .value=${config?.apiKey ?? ''}
        @input=${(e: InputEvent) => updateVertexAISettings(e, 'apiKey')}
      ></md-filled-text-field>
      <p>Or use a service account:</p>
      <md-filled-text-field
        type="textarea"
        label="Service account JSON"
        placeholder="Paste the full JSON key file contents"
        .value=${config?.serviceAccountJson ?? ''}
        @input=${(e: InputEvent) =>
          updateVertexAISettings(e, 'serviceAccountJson')}
      ></md-filled-text-field>
      <md-filled-text-field
        label="Location (optional, defaults to us-central1)"
        placeholder="us-central1"
        .value=${config?.location ?? ''}
        @input=${(e: InputEvent) => updateVertexAISettings(e, 'location')}
      ></md-filled-text-field>
      ${this.renderCheckApiKey(ApiKeyType.VERTEX_AI_API_KEY)}
    </div>
  `;
}

// ============ Claude ============

private renderClaudeSettings() {
Expand Down
1 change: 1 addition & 0 deletions functions/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
"dependencies": {
"@ai-sdk/anthropic": "^3.0.59",
"@ai-sdk/google": "^3.0.46",
"@ai-sdk/google-vertex": "^4.0.73",
"@ai-sdk/openai": "^3.0.44",
"@deliberation-lab/utils": "file:../utils",
"@sinclair/typebox": "^0.34.48",
Expand Down
52 changes: 0 additions & 52 deletions functions/src/api/ai-sdk.api.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@ const {
mapResultToModelResponse,
extractContentFromMessages,
mapErrorToModelResponse,
getCredentials,
API_TYPE_TO_PROVIDER,
} = _testing;

Expand Down Expand Up @@ -951,57 +950,6 @@ describe('mapErrorToModelResponse', () => {
});
});

// ============================================================================
// CREDENTIAL TESTS
// ============================================================================

// Unit tests for getCredentials: verifies per-provider extraction of
// apiKey/baseURL pairs from a shared APIKeyConfig fixture.
// NOTE(review): config uses `baseUrl` while the SDK expects `baseURL` —
// the casing translation is part of the contract being tested.
describe('getCredentials', () => {
  // Fixture with every provider populated, including custom base URLs.
  const apiKeyConfig: APIKeyConfig = {
    geminiApiKey: 'gemini-key',
    openAIApiKey: {apiKey: 'openai-key', baseUrl: 'https://custom.openai.com'},
    claudeApiKey: {apiKey: 'claude-key', baseUrl: 'https://custom.claude.com'},
    ollamaApiKey: {url: 'http://localhost:11434', apiKey: 'ollama-key'},
  };

  it('extracts Gemini credentials', () => {
    // Gemini has only a bare key — no baseURL in the result.
    const creds = getCredentials(apiKeyConfig, ApiKeyType.GEMINI_API_KEY);
    expect(creds).toEqual({apiKey: 'gemini-key'});
  });

  it('extracts OpenAI credentials with baseURL', () => {
    const creds = getCredentials(apiKeyConfig, ApiKeyType.OPENAI_API_KEY);
    expect(creds).toEqual({
      apiKey: 'openai-key',
      baseURL: 'https://custom.openai.com',
    });
  });

  it('extracts Claude credentials with baseURL', () => {
    const creds = getCredentials(apiKeyConfig, ApiKeyType.CLAUDE_API_KEY);
    expect(creds).toEqual({
      apiKey: 'claude-key',
      baseURL: 'https://custom.claude.com',
    });
  });

  it('extracts Ollama credentials', () => {
    // For Ollama the stored `url` becomes the baseURL.
    const creds = getCredentials(apiKeyConfig, ApiKeyType.OLLAMA_CUSTOM_URL);
    expect(creds).toEqual({
      apiKey: 'ollama-key',
      baseURL: 'http://localhost:11434',
    });
  });

  it('handles missing optional baseUrl', () => {
    // Create a partial config - getCredentials handles missing fields gracefully
    // Empty-string baseUrl must normalize to undefined, not ''.
    const config = {
      openAIApiKey: {apiKey: 'key', baseUrl: ''},
    } as APIKeyConfig;
    const creds = getCredentials(config, ApiKeyType.OPENAI_API_KEY);
    expect(creds).toEqual({apiKey: 'key', baseURL: undefined});
  });
});

describe('API_TYPE_TO_PROVIDER', () => {
it('maps all ApiKeyType values', () => {
expect(API_TYPE_TO_PROVIDER[ApiKeyType.GEMINI_API_KEY]).toBe('google');
Expand Down
100 changes: 54 additions & 46 deletions functions/src/api/ai-sdk.api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import type {AssistantContent, FilePart} from '@ai-sdk/provider-utils';
import {createOpenAI} from '@ai-sdk/openai';
import {createAnthropic} from '@ai-sdk/anthropic';
import {createGoogleGenerativeAI} from '@ai-sdk/google';
import {createVertex} from '@ai-sdk/google-vertex';
import {createOllama} from 'ollama-ai-provider-v2';

// Re-export ModelMessage for use throughout functions package
Expand All @@ -32,6 +33,7 @@ import {
APIKeyConfig,
AgentModelSettings,
ApiKeyType,
VertexAIConfig,
isAlwaysThinkingModel,
schemaToObject,
} from '@deliberation-lab/utils';
Expand All @@ -44,32 +46,68 @@ import {
* Provider factory creates a language model instance from configuration.
* Returns LanguageModel (which can be a string or a model object).
*/
type ProviderFactory = (config: {
apiKey?: string;
baseURL?: string;
}) => (modelId: string) => LanguageModel;
type ProviderFactory = (
config: APIKeyConfig,
) => (modelId: string) => LanguageModel;

/**
* Registry of available AI providers.
* Adding a new provider requires:
* 1. npm install @ai-sdk/[provider]
* 2. Add one line to this registry
*/
/**
 * Creates a Vertex AI model factory.
 *
 * Two auth modes, mirroring the experimenter settings UI:
 * - Express mode: a plain API key (takes precedence when present).
 * - Service account: full JSON key file contents pasted by the experimenter.
 *
 * @param config Vertex AI settings from the experimenter's API key config.
 * @returns A factory mapping a model ID to a LanguageModel instance.
 * @throws Error when the pasted service-account JSON is not valid JSON.
 */
function getVertexModelFactory(
  config?: VertexAIConfig,
): (modelId: string) => LanguageModel {
  // Express mode: an API key alone is sufficient; no project/location needed.
  if (config?.apiKey) {
    const provider = createVertex({apiKey: config.apiKey});
    return (modelId: string) => provider(modelId) as LanguageModel;
  }

  // Service-account mode: parse the pasted JSON key file. Surface a clear
  // error instead of a bare SyntaxError, since this is user-entered text.
  let serviceAccount: {
    project_id?: string;
    client_email?: string;
    private_key?: string;
  } = {};
  if (config?.serviceAccountJson) {
    try {
      serviceAccount = JSON.parse(config.serviceAccountJson);
    } catch {
      throw new Error(
        'Vertex AI service account credentials are not valid JSON. ' +
          'Paste the full contents of the JSON key file.',
      );
    }
  }

  const provider = createVertex({
    // An explicitly configured project wins; otherwise use the key file's.
    project: config?.project || serviceAccount.project_id,
    // The settings UI promises a us-central1 default; without one here the
    // SDK falls back to the GOOGLE_VERTEX_LOCATION env var and fails if unset.
    location: config?.location || 'us-central1',
    googleAuthOptions: {
      credentials: {
        client_email: serviceAccount.client_email,
        private_key: serviceAccount.private_key,
      },
    },
  });
  return (modelId: string) => provider(modelId) as LanguageModel;
}

const PROVIDER_REGISTRY: Record<string, ProviderFactory> = {
google: ({apiKey}) => {
const provider = createGoogleGenerativeAI({apiKey});
google: (config) => {
const provider = createGoogleGenerativeAI({apiKey: config.geminiApiKey});
return (modelId: string) => provider(modelId) as LanguageModel;
},
openai: ({apiKey, baseURL}) => {
const provider = createOpenAI({apiKey, baseURL});
vertex: (config) => getVertexModelFactory(config.vertexAIConfig),
openai: (config) => {
const provider = createOpenAI({
apiKey: config.openAIApiKey?.apiKey,
baseURL: config.openAIApiKey?.baseUrl || undefined,
});
return (modelId: string) => provider(modelId) as LanguageModel;
},
anthropic: ({apiKey, baseURL}) => {
const provider = createAnthropic({apiKey, baseURL});
anthropic: (config) => {
const provider = createAnthropic({
apiKey: config.claudeApiKey?.apiKey,
baseURL: config.claudeApiKey?.baseUrl || undefined,
});
return (modelId: string) => provider(modelId) as LanguageModel;
},
ollama: ({baseURL}) => {
const provider = createOllama({baseURL});
ollama: (config) => {
const provider = createOllama({
baseURL: config.ollamaApiKey?.url,
});
return (modelId: string) => provider(modelId) as LanguageModel;
},
};
Expand All @@ -79,41 +117,12 @@ const PROVIDER_REGISTRY: Record<string, ProviderFactory> = {
*/
// Maps each ApiKeyType to its PROVIDER_REGISTRY key. Typing the record as
// Record<ApiKeyType, string> makes the compiler reject a new ApiKeyType
// that is added without a corresponding mapping here.
const API_TYPE_TO_PROVIDER: Record<ApiKeyType, string> = {
  [ApiKeyType.GEMINI_API_KEY]: 'google',
  [ApiKeyType.VERTEX_AI_API_KEY]: 'vertex',
  [ApiKeyType.OPENAI_API_KEY]: 'openai',
  [ApiKeyType.CLAUDE_API_KEY]: 'anthropic',
  [ApiKeyType.OLLAMA_CUSTOM_URL]: 'ollama',
};

/**
 * Extracts credentials from APIKeyConfig based on API type.
 * Returns an empty object for unrecognized API types; empty baseUrl
 * strings are normalized to undefined so SDK defaults apply.
 */
function getCredentials(
  apiKeyConfig: APIKeyConfig,
  apiType: ApiKeyType,
): {apiKey?: string; baseURL?: string} {
  if (apiType === ApiKeyType.GEMINI_API_KEY) {
    // Gemini stores a bare key with no endpoint override.
    return {apiKey: apiKeyConfig.geminiApiKey};
  }
  if (apiType === ApiKeyType.OPENAI_API_KEY) {
    const openAI = apiKeyConfig.openAIApiKey;
    return {apiKey: openAI?.apiKey, baseURL: openAI?.baseUrl || undefined};
  }
  if (apiType === ApiKeyType.CLAUDE_API_KEY) {
    const claude = apiKeyConfig.claudeApiKey;
    return {apiKey: claude?.apiKey, baseURL: claude?.baseUrl || undefined};
  }
  if (apiType === ApiKeyType.OLLAMA_CUSTOM_URL) {
    // Ollama's stored endpoint URL becomes the baseURL.
    const ollama = apiKeyConfig.ollamaApiKey;
    return {apiKey: ollama?.apiKey || undefined, baseURL: ollama?.url};
  }
  return {};
}

/**
* Gets a language model instance from the provider registry.
*/
Expand All @@ -135,8 +144,7 @@ function getModel(
);
}

const credentials = getCredentials(apiKeyConfig, modelSettings.apiType);
return providerFactory(credentials)(modelSettings.modelName);
return providerFactory(apiKeyConfig)(modelSettings.modelName);
}

// ============================================================================
Expand Down Expand Up @@ -355,7 +363,8 @@ function getProviderOptions(
const overrides = generationConfig.providerOptions;

switch (apiType) {
case ApiKeyType.GEMINI_API_KEY: {
case ApiKeyType.GEMINI_API_KEY:
case ApiKeyType.VERTEX_AI_API_KEY: {
const mapped = buildGoogleOptions(generationConfig, modelName);
const merged = {...mapped, ...overrides?.google};
return {google: merged};
Expand Down Expand Up @@ -966,6 +975,5 @@ export const _testing = {
mapResultToModelResponse,
extractContentFromMessages,
mapErrorToModelResponse,
getCredentials,
API_TYPE_TO_PROVIDER,
};
Loading