Skip to content

Commit

Permalink
Added Claude Models
Browse files Browse the repository at this point in the history
  • Loading branch information
Zaki-1052 committed Mar 15, 2024
1 parent 5bfa404 commit 352602d
Show file tree
Hide file tree
Showing 3 changed files with 126 additions and 39 deletions.
6 changes: 6 additions & 0 deletions public/portal.html
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,12 @@
<button id="model-gemini-pro-vision" data-value="gemini-pro-vision">Gemini Pro Vision</button>
<button id="model-gemini-1.5-pro" data-value="gemini-1.5-pro">Gemini-1.5 Pro</button>
<button id="model-gemini-ultra" data-value="gemini-1.0-ultra">Gemini Ultra</button>
<button id="model-claude-opus" data-value="claude-3-opus-20240229">Claude Opus</button>
<button id="model-claude-sonnet" data-value="claude-3-sonnet-20240229">Claude Sonnet</button>
<button id="model-claude-haiku" data-value="claude-3-haiku">Claude Haiku</button>
<button id="model-claude-2.1" data-value="claude-2.1">Claude 2.1</button>
<button id="model-claude-2.0" data-value="claude-2.0">Claude 2.0</button>
<button id="model-claude-1.2" data-value="claude-instant-1.2">Claude 1.2</button>
<button id="model-mistral-tiny" data-value="mistral-tiny-2312">Mistral Tiny</button>
<button id="model-mistral-8x7b" data-value="mistral-small-2312">Mixtral 8x7b</button>
<button id="model-mistral-small" data-value="mistral-small-latest">Mistral Small</button>
Expand Down
35 changes: 35 additions & 0 deletions public/script.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,12 @@ fetchConfig();
"Gemini-Pro-Vision": "gemini-pro-vision",
"Gemini-1.5-Pro": "gemini-1.5-pro",
"Gemini-Ultra": "gemini-1.0-ultra",
"Claude-Opus": "claude-3-opus-20240229",
"Claude-Sonnet": "claude-3-sonnet-20240229",
"Claude-Haiku": "claude-3-haiku",
"Claude-2.1": "claude-2.1",
"Claude-2.0": "claude-2.0",
"Claude-1.2": "claude-instant-1.2",
"Mistral-Tiny": "mistral-tiny-2312",
"Mistral-8x7b": "mistral-small-2312",
"Mistral-Small": "mistral-small-latest",
Expand All @@ -59,6 +65,12 @@ fetchConfig();
"gemini-pro-vision": "Gemini-Pro-Vision",
"gemini-1.5-pro": "Gemini-1.5-Pro",
"gemini-1.0-ultra": "Gemini-Ultra",
"claude-3-opus-20240229": "Claude-Opus",
"claude-3-sonnet-20240229": "Claude-Sonnet",
"claude-3-haiku": "Claude-Haiku",
"claude-2.1": "Claude-2.1",
"claude-2.0": "Claude-2.0",
"claude-instant-1.2": "Claude-1.2",
"mistral-tiny-2312": "Mistral-Tiny",
"mistral-small-2312": "Mistral-8x7b",
"mistral-small-latest": "Mistral-Small",
Expand Down Expand Up @@ -229,6 +241,12 @@ const selectedModelDisplayName = document.getElementById('selected-model').textC
"gemini-pro-vision": "Gemini-Vision: View Images — One-Time Use",
"gemini-1.5-pro": "Gemini-Pro-1.5: Early Access — 1 Million Tokens",
"gemini-1.0-ultra": "Gemini-Ultra: Largest Google Model — Unreleased",
"claude-3-opus-20240229": "Claude-Opus: Highest Performance — GPT-4 Level",
"claude-3-sonnet-20240229": "Claude-Sonnet: Cheaper Model — 3.5 Level",
"claude-3-haiku": "Claude-Haiku: Cheapest New Model — Unreleased",
"claude-2.1": "Claude-2.1: Best Instant Model",
"claude-2.0": "Claude-2.0: Average Cheap Model",
"claude-1.2": "Claude-1.2: Cheapest Instant Model",
"mistral-tiny-2312": "Mistral-Tiny: Cheapest — Open Source 7B",
"mistral-small-2312": "Mixtral 7xB: Mixture of Experts (MoE) Model",
"mistral-small-latest": "Mistral-Small: Smarter — More Costly",
Expand Down Expand Up @@ -352,6 +370,23 @@ document.getElementById('model-mistral-small').addEventListener('mouseover', (ev
document.getElementById('model-mistral-medium').addEventListener('mouseover', (event) => showCustomTooltip(modelDescriptions["mistral-medium-latest"], event.currentTarget));
// NOTE(review): the *large* button below shows the description for the
// "mistral-medium-latest" key — this looks like a copy-paste slip. Confirm a
// "mistral-large-latest" entry exists in modelDescriptions and use it here.
document.getElementById('model-mistral-large').addEventListener('mouseover', (event) => showCustomTooltip(modelDescriptions["mistral-medium-latest"], event.currentTarget));

// Claude model buttons: wire up click-to-select and hover-tooltip behavior.
// Maps each button's element id to the model identifier sent to the backend.
const claudeButtonModels = {
  'model-claude-opus': 'claude-3-opus-20240229',
  'model-claude-sonnet': 'claude-3-sonnet-20240229',
  'model-claude-haiku': 'claude-3-haiku',
  'model-claude-2.1': 'claude-2.1',
  'model-claude-2.0': 'claude-2.0',
  'model-claude-1.2': 'claude-instant-1.2',
};

for (const [buttonId, claudeModelId] of Object.entries(claudeButtonModels)) {
  const button = document.getElementById(buttonId);
  // Clicking a button selects the corresponding Claude model.
  button.addEventListener('click', () => selectModel(claudeModelId));
  // Hovering shows that model's description in the custom tooltip.
  button.addEventListener('mouseover', (event) =>
    showCustomTooltip(modelDescriptions[claudeModelId], event.currentTarget));
}


// Add mouseout event listener for all model buttons
document.querySelectorAll('.select-options button').forEach(button => {
button.addEventListener('mouseout', hideCustomTooltip);
Expand Down
124 changes: 85 additions & 39 deletions server.js
Original file line number Diff line number Diff line change
Expand Up @@ -274,6 +274,28 @@ initializeGeminiConversationHistory();

// Function to convert conversation history to HTML
function exportChatToHTML() {
// Log the current state of both conversation histories before deciding which one to use
console.log("Current GPT Conversation History: ", JSON.stringify(conversationHistory, null, 2));
console.log("Current Claude Conversation History: ", JSON.stringify(claudeHistory, null, 2));

let containsAssistantMessage = conversationHistory.some(entry => entry.role === 'assistant');

let chatHistory;
if (containsAssistantMessage) {
console.log("Using GPT conversation history because it's non-empty.");
chatHistory = conversationHistory;
} else {
console.log("Using Claude conversation history as GPT history is empty or undefined.");
chatHistory = [...claudeHistory];
chatHistory.unshift({
role: 'system',
content: systemMessage
});
}

// Log the determined chatHistory
console.log("Determined Chat History: ", JSON.stringify(chatHistory, null, 2));

let htmlContent = `
<html>
<head>
Expand All @@ -291,7 +313,9 @@ initializeGeminiConversationHistory();
<body>
`;

conversationHistory.forEach(entry => {
console.log("Chat History: ", JSON.stringify(chatHistory, null, 2));

chatHistory.forEach(entry => {
let formattedContent = '';

if (Array.isArray(entry.content)) {
Expand Down Expand Up @@ -885,6 +909,8 @@ console.log(googleModel);

// Mutable request state shared by the provider-dispatch code in the /message
// handler below.
// NOTE(review): these are module-level globals, so two concurrent requests
// would clobber each other's `headers`/`apiUrl`/`data` — presumably this
// portal is single-user; confirm before deploying multi-user.
let headers;            // HTTP headers for the upstream API call, set per provider
let apiUrl = '';        // upstream chat endpoint, chosen by modelID prefix
let data;               // request payload, built per provider
let claudeHistory = []; // separate history for Claude (its API format differs from GPT/Mistral)

app.post('/message', async (req, res) => {
console.log("req.file:", req.file); // Check if the file is received
Expand Down Expand Up @@ -938,7 +964,7 @@ if (user_message === "Bye!") {

// Assuming modelID is declared globally and available here
// Determine the structure of user_input.content based on modelID
if (modelID.startsWith('gpt')) {
if (modelID.startsWith('gpt') || modelID.startsWith('claude')) {

// Add text content if present
if (user_message) {
Expand Down Expand Up @@ -972,7 +998,7 @@ if (modelID.startsWith('gpt')) {
});
}
}
} else if (modelID.startsWith('mistral')) {
} else {
// For Mistral models, user_input.content is a string and set to user_message
user_input = {
role: "user",
Expand All @@ -996,7 +1022,6 @@ if (modelID.startsWith('gpt')) {
}


conversationHistory.push(user_input);



Expand All @@ -1006,28 +1031,7 @@ conversationHistory.push(user_input);


// Define the data payload with system message and additional parameters
const data = {

// model: "gpt-4-vision-preview", // Use "gpt-4" for non-vision capabilities.
// Model is specified here as the vision-capable GPT-4.
// If users are using this portal solely for its intelligence, and do not care about "vision", then they should change the model name.
// The Model Name can be changed to:
// model: "gpt-4",
// So Delete the "// " before "model" labelling GPT-4 and add/put them before "model: "gpt-4-vision-preview", if you'd like to switch.
// This is called "commenting out", and is good practice for code maintainability, like:

// model: "gpt-4-vision-preview",

// model: "gpt-4",

// there's also the higher 32k context model

// model: "gpt-4-32k",

// use this longer context model **only** if you've considered the expenses properly

// The Default Model is now Default GPT-4, pointing to the snapshot released on August 13th.
// If users would like to use Vision capabilities, please comment out the above model and comment in the "vision-preview" at the top.
data = {

// UPDATE: Model Selector added for variability

Expand All @@ -1038,7 +1042,7 @@ conversationHistory.push(user_input);
temperature: 1, // Controls randomness: Lowering results in less random completions.
// As the temperature approaches zero, the model will become deterministic and repetitive.

top_p: 1, // Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered.
// top_p: 1, // Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered.

max_tokens: 4000, // The maximum number of tokens to **generate** shared between the prompt and completion. The exact limit varies by model.
// (One token is roughly 4 characters for standard English text)
Expand All @@ -1051,7 +1055,7 @@ conversationHistory.push(user_input);
// How much to penalize new tokens based on whether they appear in the text so far.
// Increases the model's likelihood to talk about new topics.

stream: true, // streaming messages from server to api for better memory efficiency
// stream: true, // streaming messages from server to api for better memory efficiency

// Additional Parameters
// Stop Sequences
Expand Down Expand Up @@ -1089,27 +1093,49 @@ conversationHistory.push(user_input);
// Define the headers with the Authorization and, if needed, Organization
// Determine the API to use based on modelID prefix.
// Each branch pushes user_input onto the history that will actually be sent
// to that provider, then sets `headers`/`apiUrl` (and, for Claude, `data`).
if (modelID.startsWith('gpt')) {
conversationHistory.push(user_input);
headers = {
'Authorization': `Bearer ${process.env.OPENAI_API_KEY}`,
// 'OpenAI-Organization': 'process.env.ORGANIZATION' // Uncomment if using an organization ID
};
apiUrl = 'https://api.openai.com/v1/chat/completions';
} else if (modelID.startsWith('mistral')) {
// Mistral reuses the same OpenAI-style `data` payload and history.
conversationHistory.push(user_input);
headers = {
'Authorization': `Bearer ${process.env.MISTRAL_API_KEY}`,
// Add any Mistral-specific headers here if necessary
};
apiUrl = 'https://api.mistral.ai/v1/chat/completions';
} else if (modelID.startsWith('claude')) {
claudeHistory.push(user_input);
// Anthropic's Messages API differs from the OpenAI format: the system
// prompt is a top-level `system` field rather than a message in the list,
// so the payload built earlier is replaced wholesale here.
data = {
// New data structure for Claude model
model: modelID,
max_tokens: 4000,
temperature: 1,
system: systemMessage,
messages: claudeHistory,
};
headers = {
// Anthropic authenticates via x-api-key plus a required anthropic-version header.
'x-api-key': `${process.env.CLAUDE_API_KEY}`,
'content-type': 'application/json',
'anthropic-version': '2023-06-01',
};
apiUrl = 'https://api.anthropic.com/v1/messages';
}

// Log the data payload just before sending it to the chosen API
console.log("API URL", apiUrl);
console.log(`Sending to ${modelID.startsWith('gpt') ? 'OpenAI' : 'Mistral'} API:`, JSON.stringify(data, null, 2));
console.log(`Sending to ${modelID.startsWith('gpt') ? 'OpenAI' : 'Mistral/Claude'} API:`, JSON.stringify(data, null, 2));

try {
const response = await axios.post(apiUrl, data, { headers, responseType: 'stream' });
// const response = await axios.post(apiUrl, data, { headers, responseType: 'stream' });
const response = await axios.post(apiUrl, data, { headers });
// Process the response as needed

// optional streaming implementation (currently disabled)

/*
let buffer = '';
response.data.on('data', (chunk) => {
Expand All @@ -1130,26 +1156,46 @@ conversationHistory.push(user_input);
}
}
}
*/
let messageContent;

console.log(messageContent);
const lastMessageContent = messageContent;
if (modelID.startsWith('claude')) {
messageContent = response.data.content;
} else {
messageContent = response.data.choices[0].message.content;
}
const lastMessageContent = messageContent;


if (lastMessageContent) {
// Add assistant's message to the conversation history
conversationHistory.push({ role: "assistant", content: lastMessageContent.trim() });

// Send this back to the client
res.json({ text: lastMessageContent.trim() });

console.log("Assistant Response: ", lastMessageContent)

if (modelID.startsWith('claude')) {
claudeHistory.push({ role: "assistant", content: lastMessageContent[0].text });
console.log("Claude History: ", claudeHistory);
res.json({ text: lastMessageContent[0].text });
} else {
// Add assistant's message to the conversation history
conversationHistory.push({ role: "assistant", content: lastMessageContent });
console.log("Conversation History: ", conversationHistory);
// Send this back to the client
res.json({ text: lastMessageContent });
}



} else {
// Handle no content scenario
res.status(500).json({ error: "No text was returned from the API" });
}
/*
} catch (parseError) {
console.error('Error parsing complete response:', parseError.message);
res.status(500).json({ error: "Error parsing the response from OpenAI API" });
}
});

*/
} catch (error) {
console.error('Error calling OpenAI API:', error.message);
if (error.response) {
Expand Down

0 comments on commit 352602d

Please sign in to comment.