From 71106cff03ae543da9eaf038574c1fd82931f838 Mon Sep 17 00:00:00 2001 From: William Trabazo <164008132+willchrisjr@users.noreply.github.com> Date: Tue, 29 Oct 2024 07:50:40 -0400 Subject: [PATCH 1/9] new improvements Remove unnecessary commented-out code and empty elements. * **src/app/actions.tsx** - Remove commented-out code for the `showWeather` tool. * **src/app/genui/page.tsx** - Remove commented-out import for `Chat` component. * **src/components/cards/aboutcard.tsx** - Remove empty paragraphs in the `CardContent` section. * **src/components/cards/genuicard.tsx** - Complete the incomplete sentence in the `CardContent` section. * **src/components/header.tsx** - Remove the empty `Link` component. --- For more details, open the [Copilot Workspace session](https://copilot-workspace.githubnext.com/willchrisjr/react-next-app?shareId=XXXX-XXXX-XXXX-XXXX). --- src/app/actions.tsx | 17 ++--------------- src/app/genui/page.tsx | 1 - src/components/cards/aboutcard.tsx | 3 --- src/components/cards/genuicard.tsx | 4 +--- src/components/header.tsx | 1 - 5 files changed, 3 insertions(+), 23 deletions(-) diff --git a/src/app/actions.tsx b/src/app/actions.tsx index f295564..0f24204 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -38,20 +38,7 @@ export async function continueConversation(history: Message[]) { model: groq('llama3-8b-8192'), // Use Groq model system: 'You are a friendly weather assistant!', messages: history, - tools: { - // showWeather: { - // description: 'Show the weather for a given location.', - // parameters: z.object({ - // city: z.string().describe('The city to show the weather for.'), - // unit: z - // .enum(['F']) - // .describe('The unit to display the temperature in'), - // }), - // execute: async ({ city, unit }) => { - // return `Here's the weather for ${city}!`; - // }, - // }, - }, + tools: {}, }); return { @@ -70,4 +57,4 @@ export async function continueConversation(history: Message[]) { export async function checkAIAvailability() 
{ const envVarExists = !!process.env.GROQ_API_KEY; return envVarExists; -} \ No newline at end of file +} diff --git a/src/app/genui/page.tsx b/src/app/genui/page.tsx index a3a4728..d826ea6 100644 --- a/src/app/genui/page.tsx +++ b/src/app/genui/page.tsx @@ -1,4 +1,3 @@ -// import Chat from "@/components/chat"; 'use client'; import { useState } from 'react'; diff --git a/src/components/cards/aboutcard.tsx b/src/components/cards/aboutcard.tsx index 3bf5b98..127654c 100644 --- a/src/components/cards/aboutcard.tsx +++ b/src/components/cards/aboutcard.tsx @@ -17,9 +17,6 @@ export default function AboutCard() {

Start a conversation by entering a message below:

- - -
diff --git a/src/components/cards/genuicard.tsx b/src/components/cards/genuicard.tsx index 9b7dd07..34ab2a0 100644 --- a/src/components/cards/genuicard.tsx +++ b/src/components/cards/genuicard.tsx @@ -16,9 +16,7 @@ export default function GenUICard() { Start Chatting -

A simple prompt based way to enter into conversation./

- - +

A simple prompt-based way to enter into conversation.

diff --git a/src/components/header.tsx b/src/components/header.tsx index 47879fd..fb5c159 100644 --- a/src/components/header.tsx +++ b/src/components/header.tsx @@ -12,7 +12,6 @@ export async function Header() { Chat Interface - ) } From 9c46bd959ab9b99b9d384009e3c252e79d1ca8d4 Mon Sep 17 00:00:00 2001 From: William Trabazo <164008132+willchrisjr@users.noreply.github.com> Date: Tue, 29 Oct 2024 07:55:02 -0400 Subject: [PATCH 2/9] Add new AI model providers and update functions to support them * **Add AI model providers** - Add OpenAI provider - Add Google Cloud AI provider - Add Azure AI provider * **Update functions to support new models** - Update `continueTextConversation` to support new models - Update `continueConversation` to support new models --- src/app/actions.tsx | 37 +++++++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/src/app/actions.tsx b/src/app/actions.tsx index 0f24204..b084022 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -13,6 +13,21 @@ const groq = createOpenAI({ apiKey: process.env.GROQ_API_KEY, }); +// Add OpenAI provider +const openai = createOpenAI({ + apiKey: process.env.OPENAI_API_KEY, +}); + +// Add Google Cloud AI provider +const googleCloudAI = createOpenAI({ + apiKey: process.env.GOOGLE_CLOUD_AI_API_KEY, +}); + +// Add Azure AI provider +const azureAI = createOpenAI({ + apiKey: process.env.AZURE_AI_API_KEY, +}); + export interface Message { role: 'user' | 'assistant'; content: string; @@ -20,9 +35,16 @@ export interface Message { } // Streaming Chat -export async function continueTextConversation(messages: CoreMessage[]) { +export async function continueTextConversation(messages: CoreMessage[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq') { + const modelProvider = { + groq, + openai, + googleCloudAI, + azureAI, + }[provider]; + const result = await streamText({ - model: groq('llama3-8b-8192'), // Use Groq model + model: 
modelProvider('llama3-8b-8192'), // Use selected model messages, }); @@ -31,11 +53,18 @@ export async function continueTextConversation(messages: CoreMessage[]) { } // Gen UIs -export async function continueConversation(history: Message[]) { +export async function continueConversation(history: Message[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq') { const stream = createStreamableUI(); + const modelProvider = { + groq, + openai, + googleCloudAI, + azureAI, + }[provider]; + const { text, toolResults } = await generateText({ - model: groq('llama3-8b-8192'), // Use Groq model + model: modelProvider('llama3-8b-8192'), // Use selected model system: 'You are a friendly weather assistant!', messages: history, tools: {}, From a21ceb509a072a90471f968a8d0ade64eb3680cf Mon Sep 17 00:00:00 2001 From: William Trabazo <164008132+willchrisjr@users.noreply.github.com> Date: Tue, 29 Oct 2024 07:57:34 -0400 Subject: [PATCH 3/9] Modify `continueTextConversation` and `continueConversation` to accept a `model` parameter * Update `streamText` and `generateText` calls to use the `model` parameter Add a dropdown for selecting the AI model in `GenUI` and `Chat` components * Pass the selected model to `continueConversation` and `continueTextConversation` --- src/app/actions.tsx | 8 ++++---- src/app/genui/page.tsx | 14 +++++++++++++- src/components/chat.tsx | 13 ++++++++++++- 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/app/actions.tsx b/src/app/actions.tsx index b084022..3e0e05c 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -35,7 +35,7 @@ export interface Message { } // Streaming Chat -export async function continueTextConversation(messages: CoreMessage[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq') { +export async function continueTextConversation(messages: CoreMessage[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq', model: string = 'llama3-8b-8192') { const modelProvider = { groq, 
openai, @@ -44,7 +44,7 @@ export async function continueTextConversation(messages: CoreMessage[], provider }[provider]; const result = await streamText({ - model: modelProvider('llama3-8b-8192'), // Use selected model + model: modelProvider(model), // Use selected model messages, }); @@ -53,7 +53,7 @@ export async function continueTextConversation(messages: CoreMessage[], provider } // Gen UIs -export async function continueConversation(history: Message[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq') { +export async function continueConversation(history: Message[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq', model: string = 'llama3-8b-8192') { const stream = createStreamableUI(); const modelProvider = { @@ -64,7 +64,7 @@ export async function continueConversation(history: Message[], provider: 'groq' }[provider]; const { text, toolResults } = await generateText({ - model: modelProvider('llama3-8b-8192'), // Use selected model + model: modelProvider(model), // Use selected model system: 'You are a friendly weather assistant!', messages: history, tools: {}, diff --git a/src/app/genui/page.tsx b/src/app/genui/page.tsx index d826ea6..4e8dfa0 100644 --- a/src/app/genui/page.tsx +++ b/src/app/genui/page.tsx @@ -12,12 +12,14 @@ export const maxDuration = 30; export default function GenUI() { const [conversation, setConversation] = useState([]); const [input, setInput] = useState(''); + const [selectedModel, setSelectedModel] = useState('groq'); // Add state for selected model + const handleSubmit = async () => { const { messages } = await continueConversation([ // exclude React components from being sent back to the server: ...conversation.map(({ role, content }) => ({ role, content })), { role: 'user', content: input }, - ]); + ], selectedModel); // Pass selected model setInput("") setConversation(messages); } @@ -50,6 +52,16 @@ export default function GenUI() {
+ ([]); const [input, setInput] = useState(''); + const [selectedModel, setSelectedModel] = useState('groq'); // Add state for selected model const handleSubmit = async (e: React.FormEvent) => { e.preventDefault() @@ -24,7 +25,7 @@ export default function Chat() { ]; setMessages(newMessages); setInput(''); - const result = await continueTextConversation(newMessages); + const result = await continueTextConversation(newMessages, selectedModel); // Pass selected model for await (const content of readStreamableValue(result)) { setMessages([ ...newMessages, @@ -57,6 +58,16 @@ export default function Chat() {
+ Date: Tue, 29 Oct 2024 08:06:05 -0400 Subject: [PATCH 4/9] Separate model provider creation into distinct functions and update functions to accept separate provider and model parameters. * **src/app/actions.tsx** - Create `getModelProvider` function to return the appropriate model provider. - Update `continueTextConversation` and `continueConversation` to accept separate provider and model parameters. * **src/app/genui/page.tsx** - Add state for selected provider and model. - Update `handleSubmit` to pass both provider and model. - Update select dropdown to include both provider and model options. * **src/components/chat.tsx** - Add state for selected provider and model. - Update `handleSubmit` to pass both provider and model. - Update select dropdown to include both provider and model options. * **README.md** - Add instructions on how to select both provider and model. --- README.md | 24 ++++++++++++++++-------- src/app/actions.tsx | 21 +++++++++++---------- src/app/genui/page.tsx | 19 +++++++++++++++---- src/components/chat.tsx | 19 +++++++++++++++---- 4 files changed, 57 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index 522a267..7d3e7b9 100644 --- a/README.md +++ b/README.md @@ -23,19 +23,27 @@ 5. Open [http://localhost:3000](http://localhost:3000) in your browser to see the result. - - +## Selecting AI Model Provider and Model + +To select an AI model provider and model, follow these steps: + +1. In the chat interface, you will see two dropdown menus. +2. The first dropdown menu allows you to select the AI model provider. The available options are: + - Groq + - OpenAI + - Google Cloud AI + - Azure AI +3. The second dropdown menu allows you to select the AI model. The available options are: + - Llama 3 8B 8192 + - GPT-3.5 Turbo + - PaLM 2 + - Davinci +4. Select the desired provider and model from the dropdown menus before starting the conversation. 
## Project Screenshots - ![Chat Interface](./public/images/screenshot1.png) ![Chat Interface](./public/images/screenshot2.png) ![Chat Interface](./public/images/screenshot3.png) - - - - - diff --git a/src/app/actions.tsx b/src/app/actions.tsx index 3e0e05c..35ba4d0 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -34,14 +34,20 @@ export interface Message { display?: ReactNode; } -// Streaming Chat -export async function continueTextConversation(messages: CoreMessage[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq', model: string = 'llama3-8b-8192') { - const modelProvider = { +// Function to get model provider +function getModelProvider(provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI') { + const modelProviders = { groq, openai, googleCloudAI, azureAI, - }[provider]; + }; + return modelProviders[provider]; +} + +// Streaming Chat +export async function continueTextConversation(messages: CoreMessage[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq', model: string = 'llama3-8b-8192') { + const modelProvider = getModelProvider(provider); const result = await streamText({ model: modelProvider(model), // Use selected model @@ -56,12 +62,7 @@ export async function continueTextConversation(messages: CoreMessage[], provider export async function continueConversation(history: Message[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq', model: string = 'llama3-8b-8192') { const stream = createStreamableUI(); - const modelProvider = { - groq, - openai, - googleCloudAI, - azureAI, - }[provider]; + const modelProvider = getModelProvider(provider); const { text, toolResults } = await generateText({ model: modelProvider(model), // Use selected model diff --git a/src/app/genui/page.tsx b/src/app/genui/page.tsx index 4e8dfa0..d03c678 100644 --- a/src/app/genui/page.tsx +++ b/src/app/genui/page.tsx @@ -12,14 +12,15 @@ export const maxDuration = 30; export default function GenUI() { const [conversation, 
setConversation] = useState([]); const [input, setInput] = useState(''); - const [selectedModel, setSelectedModel] = useState('groq'); // Add state for selected model + const [selectedProvider, setSelectedProvider] = useState('groq'); // Add state for selected provider + const [selectedModel, setSelectedModel] = useState('llama3-8b-8192'); // Add state for selected model const handleSubmit = async () => { const { messages } = await continueConversation([ // exclude React components from being sent back to the server: ...conversation.map(({ role, content }) => ({ role, content })), { role: 'user', content: input }, - ], selectedModel); // Pass selected model + ], selectedProvider, selectedModel); // Pass both provider and model setInput("") setConversation(messages); } @@ -53,8 +54,8 @@ export default function GenUI() {
+ ([]); const [input, setInput] = useState(''); - const [selectedModel, setSelectedModel] = useState('groq'); // Add state for selected model + const [selectedProvider, setSelectedProvider] = useState('groq'); // Add state for selected provider + const [selectedModel, setSelectedModel] = useState('llama3-8b-8192'); // Add state for selected model const handleSubmit = async (e: React.FormEvent) => { e.preventDefault() @@ -25,7 +26,7 @@ export default function Chat() { ]; setMessages(newMessages); setInput(''); - const result = await continueTextConversation(newMessages, selectedModel); // Pass selected model + const result = await continueTextConversation(newMessages, selectedProvider, selectedModel); // Pass both provider and model for await (const content of readStreamableValue(result)) { setMessages([ ...newMessages, @@ -59,8 +60,8 @@ export default function Chat() {
+ Date: Tue, 29 Oct 2024 08:22:20 -0400 Subject: [PATCH 5/9] Add `getChatCompletion` function and update OpenAI API key setup * **`src/app/actions.tsx`** - Add `getChatCompletion` function to use `createChatCompletion` with the OpenAI API - Include error handling for the API request * **`README.md`** - Update instructions to include setting up the OpenAI API key in the environment variables * **`tsconfig.json`** - Update `compilerOptions` to match the provided configuration - Ensure `include` includes the `src` directory --- README.md | 1 + src/app/actions.tsx | 28 ++++++++++++++++++++++++++++ tsconfig.json | 26 +++++--------------------- 3 files changed, 34 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index 7d3e7b9..57c51a0 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,7 @@ ``` 3. Set up your environment variables: - Add your `GROQ_API_KEY` to the `.env` file + - Add your `OPENAI_API_KEY` to the `.env` file 4. Run the development server: ```bash diff --git a/src/app/actions.tsx b/src/app/actions.tsx index 35ba4d0..d236c8c 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -6,6 +6,7 @@ import { createOpenAI } from '@ai-sdk/openai'; import { createStreamableUI } from 'ai/rsc'; import { ReactNode } from 'react'; import { z } from 'zod'; +import { Configuration, OpenAIApi } from "openai"; // Import OpenAI API // Add Groq provider const groq = createOpenAI({ @@ -88,3 +89,30 @@ export async function checkAIAvailability() { const envVarExists = !!process.env.GROQ_API_KEY; return envVarExists; } + +// Function to get chat completion using OpenAI API +export async function getChatCompletion() { + const configuration = new Configuration({ + apiKey: process.env.OPENAI_API_KEY, + }); + + const openai = new OpenAIApi(configuration); + + try { + const response = await openai.createChatCompletion({ + model: "gpt-3.5-turbo", + messages: [ + { role: "system", content: "You are a helpful assistant." 
}, + { role: "user", content: "Tell me a joke." }, + ], + }); + + console.log(response.data.choices[0].message?.content); + } catch (error) { + if (error.response) { + console.error(error.response.status, error.response.data); + } else { + console.error(`Error: ${error.message}`); + } + } +} diff --git a/tsconfig.json b/tsconfig.json index fccf2dc..7232f4a 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,27 +1,11 @@ { "compilerOptions": { - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "types": ["node"], + "target": "ES6", + "module": "commonjs", "strict": true, - "noEmit": true, "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "bundler", - "resolveJsonModule": true, - "isolatedModules": true, - "jsx": "preserve", - "incremental": true, - "plugins": [ - { - "name": "next" - } - ], - "paths": { - "@/*": ["./src/*"] - } + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], - "exclude": ["node_modules"] + "include": ["src"] } From f6f9d054ef448663d23969b0374d54d4019b45eb Mon Sep 17 00:00:00 2001 From: William Trabazo <164008132+willchrisjr@users.noreply.github.com> Date: Tue, 29 Oct 2024 08:27:17 -0400 Subject: [PATCH 6/9] --- src/app/actions.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/app/actions.tsx b/src/app/actions.tsx index d236c8c..0888c2b 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -86,7 +86,7 @@ export async function continueConversation(history: Message[], provider: 'groq' // Utils export async function checkAIAvailability() { - const envVarExists = !!process.env.GROQ_API_KEY; + const envVarExists = !!process.env.GROQ_API_KEY || !!process.env.OPENAI_API_KEY; return envVarExists; } From 8e31e0d24c7260b2dace186205cd1abb635fa57f Mon Sep 17 00:00:00 2001 From: William Trabazo <164008132+willchrisjr@users.noreply.github.com> Date: Tue, 29 Oct 2024 08:34:56 
-0400 Subject: [PATCH 7/9] Add compare mode to chat interface Add comparison mode for AI model responses. * **src/app/actions.tsx** - Add `compareAIModels` function to compare responses from different AI models. - Update `getModelProvider` function to include a new provider `compare`. - Update `continueTextConversation` and `continueConversation` functions to handle the `compare` provider. * **src/app/genui/page.tsx** - Add `compareMode` state to toggle comparison mode. - Update `handleSubmit` function to handle comparison mode. - Add a new button to toggle comparison mode in the UI. * **src/components/chat.tsx** - Add `compareMode` state to toggle comparison mode. - Update `handleSubmit` function to handle comparison mode. - Add a new button to toggle comparison mode in the UI. --- For more details, open the [Copilot Workspace session](https://copilot-workspace.githubnext.com/willchrisjr/react-next-app?shareId=XXXX-XXXX-XXXX-XXXX). --- src/app/actions.tsx | 42 ++++++++++++++++++++++++++++++++++++++--- src/app/genui/page.tsx | 13 ++++++++++++- src/components/chat.tsx | 13 ++++++++++++- 3 files changed, 63 insertions(+), 5 deletions(-) diff --git a/src/app/actions.tsx b/src/app/actions.tsx index 0888c2b..39f1640 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -36,7 +36,7 @@ export interface Message { } // Function to get model provider -function getModelProvider(provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI') { +function getModelProvider(provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' | 'compare') { const modelProviders = { groq, openai, @@ -46,8 +46,30 @@ function getModelProvider(provider: 'groq' | 'openai' | 'googleCloudAI' | 'azure return modelProviders[provider]; } +// Function to compare responses from different AI models +export async function compareAIModels(messages: CoreMessage[], models: string[]) { + const results = await Promise.all(models.map(async (model) => { + const result = await streamText({ + model: 
openai(model), // Assuming openai for comparison, adjust as needed + messages, + }); + return { + model, + response: result.textStream, + }; + })); + + return results; +} + // Streaming Chat -export async function continueTextConversation(messages: CoreMessage[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq', model: string = 'llama3-8b-8192') { +export async function continueTextConversation(messages: CoreMessage[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' | 'compare' = 'groq', model: string = 'llama3-8b-8192') { + if (provider === 'compare') { + const models = ['llama3-8b-8192', 'gpt-3.5-turbo', 'palm-2', 'davinci']; // Example models to compare + const results = await compareAIModels(messages, models); + return results; + } + const modelProvider = getModelProvider(provider); const result = await streamText({ @@ -60,7 +82,21 @@ export async function continueTextConversation(messages: CoreMessage[], provider } // Gen UIs -export async function continueConversation(history: Message[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' = 'groq', model: string = 'llama3-8b-8192') { +export async function continueConversation(history: Message[], provider: 'groq' | 'openai' | 'googleCloudAI' | 'azureAI' | 'compare' = 'groq', model: string = 'llama3-8b-8192') { + if (provider === 'compare') { + const models = ['llama3-8b-8192', 'gpt-3.5-turbo', 'palm-2', 'davinci']; // Example models to compare + const results = await compareAIModels(history, models); + return { + messages: [ + ...history, + ...results.map(result => ({ + role: 'assistant' as const, + content: result.response, + })), + ], + }; + } + const stream = createStreamableUI(); const modelProvider = getModelProvider(provider); diff --git a/src/app/genui/page.tsx b/src/app/genui/page.tsx index d03c678..1bb501e 100644 --- a/src/app/genui/page.tsx +++ b/src/app/genui/page.tsx @@ -14,13 +14,14 @@ export default function GenUI() { const [input, setInput] = useState(''); 
const [selectedProvider, setSelectedProvider] = useState('groq'); // Add state for selected provider const [selectedModel, setSelectedModel] = useState('llama3-8b-8192'); // Add state for selected model + const [compareMode, setCompareMode] = useState(false); // Add state for comparison mode const handleSubmit = async () => { const { messages } = await continueConversation([ // exclude React components from being sent back to the server: ...conversation.map(({ role, content }) => ({ role, content })), { role: 'user', content: input }, - ], selectedProvider, selectedModel); // Pass both provider and model + ], compareMode ? 'compare' : selectedProvider, selectedModel); // Pass both provider and model, handle comparison mode setInput("") setConversation(messages); } @@ -57,6 +58,7 @@ export default function GenUI() { value={selectedProvider} onChange={(e) => setSelectedProvider(e.target.value)} className="mr-2 p-2 border rounded" + disabled={compareMode} // Disable provider selection in comparison mode > @@ -67,6 +69,7 @@ export default function GenUI() { value={selectedModel} onChange={(e) => setSelectedModel(e.target.value)} className="mr-2 p-2 border rounded" + disabled={compareMode} // Disable model selection in comparison mode > @@ -90,6 +93,14 @@ export default function GenUI() {
+
+ +
diff --git a/src/components/chat.tsx b/src/components/chat.tsx index 3b3d323..73cccb7 100644 --- a/src/components/chat.tsx +++ b/src/components/chat.tsx @@ -17,6 +17,7 @@ export default function Chat() { const [input, setInput] = useState(''); const [selectedProvider, setSelectedProvider] = useState('groq'); // Add state for selected provider const [selectedModel, setSelectedModel] = useState('llama3-8b-8192'); // Add state for selected model + const [compareMode, setCompareMode] = useState(false); // Add state for comparison mode const handleSubmit = async (e: React.FormEvent) => { e.preventDefault() @@ -26,7 +27,7 @@ export default function Chat() { ]; setMessages(newMessages); setInput(''); - const result = await continueTextConversation(newMessages, selectedProvider, selectedModel); // Pass both provider and model + const result = await continueTextConversation(newMessages, compareMode ? 'compare' : selectedProvider, selectedModel); // Pass both provider and model, handle comparison mode for await (const content of readStreamableValue(result)) { setMessages([ ...newMessages, @@ -63,6 +64,7 @@ export default function Chat() { value={selectedProvider} onChange={(e) => setSelectedProvider(e.target.value)} className="mr-2 p-2 border rounded" + disabled={compareMode} // Disable provider selection in comparison mode > @@ -73,6 +75,7 @@ export default function Chat() { value={selectedModel} onChange={(e) => setSelectedModel(e.target.value)} className="mr-2 p-2 border rounded" + disabled={compareMode} // Disable model selection in comparison mode > @@ -98,6 +101,14 @@ export default function Chat() {
)}
+
+ +
From 40d50091075f6d180a31f241bff55fc69b6fbf2e Mon Sep 17 00:00:00 2001 From: William Trabazo <164008132+willchrisjr@users.noreply.github.com> Date: Tue, 29 Oct 2024 08:43:06 -0400 Subject: [PATCH 8/9] Import `axios` and `dotenv` and update `getChatCompletion` function to use Azure OpenAI API * **Environment Variables** - Load environment variables using `dotenv.config()` - Retrieve `AZURE_OPENAI_API_KEY` and `AZURE_OPENAI_ENDPOINT` from environment variables - Throw error if API key or endpoint is not defined * **getChatCompletion Function** - Update to use Azure OpenAI API key and endpoint - Log the response from Azure OpenAI API --- src/app/actions.tsx | 51 +++++++++++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 20 deletions(-) diff --git a/src/app/actions.tsx b/src/app/actions.tsx index 39f1640..a25f0e2 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -7,6 +7,19 @@ import { createStreamableUI } from 'ai/rsc'; import { ReactNode } from 'react'; import { z } from 'zod'; import { Configuration, OpenAIApi } from "openai"; // Import OpenAI API +import axios from 'axios'; +import dotenv from 'dotenv'; + +// Load environment variables from .env file +dotenv.config(); + +// Retrieve API key and endpoint from environment variables +const apiKey = process.env.AZURE_OPENAI_API_KEY; +const endpoint = process.env.AZURE_OPENAI_ENDPOINT; + +if (!apiKey || !endpoint) { + throw new Error('API key or endpoint is not defined in the environment variables'); +} // Add Groq provider const groq = createOpenAI({ @@ -128,27 +141,25 @@ export async function checkAIAvailability() { // Function to get chat completion using OpenAI API export async function getChatCompletion() { - const configuration = new Configuration({ - apiKey: process.env.OPENAI_API_KEY, - }); - - const openai = new OpenAIApi(configuration); - try { - const response = await openai.createChatCompletion({ - model: "gpt-3.5-turbo", - messages: [ - { role: "system", content: "You are a 
helpful assistant." }, - { role: "user", content: "Tell me a joke." }, - ], - }); - - console.log(response.data.choices[0].message?.content); + const response = await axios.post( + endpoint, + { + messages: [ + { role: 'system', content: 'You are a helpful assistant.' }, + { role: 'user', content: 'Tell me a joke.' }, + ], + }, + { + headers: { + 'Content-Type': 'application/json', + 'api-key': apiKey, + }, + } + ); + + console.log(response.data.choices[0].message.content); } catch (error) { - if (error.response) { - console.error(error.response.status, error.response.data); - } else { - console.error(`Error: ${error.message}`); - } + console.error('Error:', error.response ? error.response.data : error.message); } } From cbea8b17bf3c40e52ca6b328aa5e3128d1ebdb4f Mon Sep 17 00:00:00 2001 From: William Trabazo <164008132+willchrisjr@users.noreply.github.com> Date: Tue, 29 Oct 2024 08:47:41 -0400 Subject: [PATCH 9/9] Update `src/app/actions.tsx` to support Azure AI provider and update `README.md` * **Azure AI provider:** - Add Azure AI provider with `getChatCompletion` function using provided code snippet. - Update `continueTextConversation` and `continueConversation` functions to support `azureAI` provider. - Update `checkAIAvailability` function to check for `AZURE_OPENAI_API_KEY` and `AZURE_OPENAI_ENDPOINT`. * **README.md:** - Add instructions to set up `AZURE_OPENAI_API_KEY` and `AZURE_OPENAI_ENDPOINT` in the `.env` file. - Update the list of AI model providers to include Azure AI. --- README.md | 2 ++ src/app/actions.tsx | 49 +++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 47 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 57c51a0..cdc51f9 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,8 @@ 3. 
Set up your environment variables: - Add your `GROQ_API_KEY` to the `.env` file - Add your `OPENAI_API_KEY` to the `.env` file + - Add your `AZURE_OPENAI_API_KEY` to the `.env` file + - Add your `AZURE_OPENAI_ENDPOINT` to the `.env` file 4. Run the development server: ```bash diff --git a/src/app/actions.tsx b/src/app/actions.tsx index a25f0e2..dfa65ab 100644 --- a/src/app/actions.tsx +++ b/src/app/actions.tsx @@ -38,9 +38,32 @@ const googleCloudAI = createOpenAI({ }); // Add Azure AI provider -const azureAI = createOpenAI({ - apiKey: process.env.AZURE_AI_API_KEY, -}); +const azureAI = { + async getChatCompletion(messages: CoreMessage[]) { + try { + const response = await axios.post( + endpoint, + { + messages: [ + { role: 'system', content: 'You are a helpful assistant.' }, + ...messages, + ], + }, + { + headers: { + 'Content-Type': 'application/json', + 'api-key': apiKey, + }, + } + ); + + return response.data.choices[0].message.content; + } catch (error) { + console.error('Error:', error.response ? 
error.response.data : error.message); + throw error; + } + } +}; export interface Message { role: 'user' | 'assistant'; @@ -85,6 +108,11 @@ export async function continueTextConversation(messages: CoreMessage[], provider const modelProvider = getModelProvider(provider); + if (provider === 'azureAI') { + const response = await azureAI.getChatCompletion(messages); + return createStreamableValue(response).value; + } + const result = await streamText({ model: modelProvider(model), // Use selected model messages, @@ -114,6 +142,19 @@ export async function continueConversation(history: Message[], provider: 'groq' const modelProvider = getModelProvider(provider); + if (provider === 'azureAI') { + const response = await azureAI.getChatCompletion(history); + return { + messages: [ + ...history, + { + role: 'assistant' as const, + content: response, + }, + ], + }; + } + const { text, toolResults } = await generateText({ model: modelProvider(model), // Use selected model system: 'You are a friendly weather assistant!', @@ -135,7 +176,7 @@ export async function continueConversation(history: Message[], provider: 'groq' // Utils export async function checkAIAvailability() { - const envVarExists = !!process.env.GROQ_API_KEY || !!process.env.OPENAI_API_KEY; + const envVarExists = !!process.env.GROQ_API_KEY || !!process.env.OPENAI_API_KEY || !!process.env.AZURE_OPENAI_API_KEY || !!process.env.AZURE_OPENAI_ENDPOINT; return envVarExists; }