diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml new file mode 100644 index 00000000000..fc1c02db69f --- /dev/null +++ b/.github/workflows/deploy-dev.yml @@ -0,0 +1,41 @@ +name: Update Test Server + +on: + workflow_run: + workflows: ["Docker Dev Images Build"] + types: + - completed + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-latest + if: | + github.repository == 'danny-avila/LibreChat' && + (github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success') + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Install SSH Key + uses: shimataro/ssh-key-action@v2 + with: + key: ${{ secrets.DO_SSH_PRIVATE_KEY }} + known_hosts: ${{ secrets.DO_KNOWN_HOSTS }} + + - name: Run update script on DigitalOcean Droplet + env: + DO_HOST: ${{ secrets.DO_HOST }} + DO_USER: ${{ secrets.DO_USER }} + run: | + ssh -o StrictHostKeyChecking=no ${DO_USER}@${DO_HOST} << EOF + sudo -i -u danny bash << EEOF + cd ~/LibreChat && \ + git fetch origin main && \ + npm run update:deployed && \ + git checkout do-deploy && \ + git rebase main && \ + npm run start:deployed && \ + echo "Update completed. Application should be running now." + EEOF + EOF diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000000..16b4104980a --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,16 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Launch LibreChat (debug)", + "skipFiles": ["/**"], + "program": "${workspaceFolder}/api/server/index.js", + "env": { + "NODE_ENV": "production" + }, + "console": "integratedTerminal" + } + ] +} diff --git a/Dockerfile b/Dockerfile index 1ace3200d51..3a8f3484be3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -# v0.7.3 +# v0.7.4 # Base node image FROM node:20-alpine AS node diff --git a/Dockerfile.multi b/Dockerfile.multi index d8fe9d951f8..b4596daa002 100644 --- a/Dockerfile.multi +++ b/Dockerfile.multi @@ -1,4 +1,4 @@ -# v0.7.3 +# v0.7.4 # Build API, Client and Data Provider FROM node:20-alpine AS base diff --git a/README.md b/README.md index 93f80444ae1..20751412ecc 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,7 @@ LibreChat brings together the future of assistant AIs with the revolutionary tec With LibreChat, you no longer need to opt for ChatGPT Plus and can instead use free or pay-per-call APIs. We welcome contributions, cloning, and forking to enhance the capabilities of this advanced chatbot platform. -[![Watch the video](https://img.youtube.com/vi/bSVHEbVPNl4/maxresdefault.jpg)](https://www.youtube.com/watch?v=bSVHEbVPNl4) +[![Watch the video](https://raw.githubusercontent.com/LibreChat-AI/librechat.ai/main/public/images/changelog/v0.7.4.png)](https://www.youtube.com/watch?v=cvosUxogdpI) Click on the thumbnail to open the video☝️ --- diff --git a/api/app/clients/tools/structured/GoogleSearch.js b/api/app/clients/tools/structured/GoogleSearch.js index bae1a458e0d..c2cf72b9d66 100644 --- a/api/app/clients/tools/structured/GoogleSearch.js +++ b/api/app/clients/tools/structured/GoogleSearch.js @@ -12,9 +12,15 @@ class GoogleSearchResults extends Tool { this.envVarApiKey = 'GOOGLE_SEARCH_API_KEY'; this.envVarSearchEngineId = 'GOOGLE_CSE_ID'; this.override = fields.override ?? false; - this.apiKey = fields.apiKey ?? getEnvironmentVariable(this.envVarApiKey); + this.apiKey = fields[this.envVarApiKey] ?? getEnvironmentVariable(this.envVarApiKey); this.searchEngineId = - fields.searchEngineId ?? 
getEnvironmentVariable(this.envVarSearchEngineId); + fields[this.envVarSearchEngineId] ?? getEnvironmentVariable(this.envVarSearchEngineId); + + if (!this.override && (!this.apiKey || !this.searchEngineId)) { + throw new Error( + `Missing ${this.envVarApiKey} or ${this.envVarSearchEngineId} environment variable.`, + ); + } this.kwargs = fields?.kwargs ?? {}; this.name = 'google'; diff --git a/api/app/clients/tools/structured/TavilySearchResults.js b/api/app/clients/tools/structured/TavilySearchResults.js index b1691a63dbf..9a62053ff0d 100644 --- a/api/app/clients/tools/structured/TavilySearchResults.js +++ b/api/app/clients/tools/structured/TavilySearchResults.js @@ -12,7 +12,7 @@ class TavilySearchResults extends Tool { this.envVar = 'TAVILY_API_KEY'; /* Used to initialize the Tool without necessary variables. */ this.override = fields.override ?? false; - this.apiKey = fields.apiKey ?? this.getApiKey(); + this.apiKey = fields[this.envVar] ?? this.getApiKey(); this.kwargs = fields?.kwargs ?? {}; this.name = 'tavily_search_results_json'; diff --git a/api/app/clients/tools/structured/specs/GoogleSearch.spec.js b/api/app/clients/tools/structured/specs/GoogleSearch.spec.js new file mode 100644 index 00000000000..ff112653019 --- /dev/null +++ b/api/app/clients/tools/structured/specs/GoogleSearch.spec.js @@ -0,0 +1,50 @@ +const GoogleSearch = require('../GoogleSearch'); + +jest.mock('node-fetch'); +jest.mock('@langchain/core/utils/env'); + +describe('GoogleSearch', () => { + let originalEnv; + const mockApiKey = 'mock_api'; + const mockSearchEngineId = 'mock_search_engine_id'; + + beforeAll(() => { + originalEnv = { ...process.env }; + }); + + beforeEach(() => { + jest.resetModules(); + process.env = { + ...originalEnv, + GOOGLE_SEARCH_API_KEY: mockApiKey, + GOOGLE_CSE_ID: mockSearchEngineId, + }; + }); + + afterEach(() => { + jest.clearAllMocks(); + process.env = originalEnv; + }); + + it('should use mockApiKey and mockSearchEngineId when environment variables are not set', () => { + const instance = new GoogleSearch({ + GOOGLE_SEARCH_API_KEY: mockApiKey, + GOOGLE_CSE_ID: mockSearchEngineId, + }); + expect(instance.apiKey).toBe(mockApiKey); + expect(instance.searchEngineId).toBe(mockSearchEngineId); + }); + + it('should throw an error if GOOGLE_SEARCH_API_KEY or GOOGLE_CSE_ID is missing', () => { + delete process.env.GOOGLE_SEARCH_API_KEY; + expect(() => new GoogleSearch()).toThrow( + 'Missing GOOGLE_SEARCH_API_KEY or GOOGLE_CSE_ID environment variable.', + ); + + process.env.GOOGLE_SEARCH_API_KEY = mockApiKey; + delete process.env.GOOGLE_CSE_ID; + expect(() => new GoogleSearch()).toThrow( + 'Missing GOOGLE_SEARCH_API_KEY or GOOGLE_CSE_ID environment variable.', + ); + }); +}); diff --git a/api/app/clients/tools/structured/specs/TavilySearchResults.spec.js b/api/app/clients/tools/structured/specs/TavilySearchResults.spec.js new file mode 100644 index 00000000000..5ea00140c72 --- /dev/null +++ b/api/app/clients/tools/structured/specs/TavilySearchResults.spec.js @@ -0,0 +1,38 @@ +const TavilySearchResults = require('../TavilySearchResults'); + +jest.mock('node-fetch'); +jest.mock('@langchain/core/utils/env'); + +describe('TavilySearchResults', () => { + let originalEnv; + const mockApiKey = 'mock_api_key'; + + beforeAll(() => { + originalEnv = { ...process.env }; + }); + + beforeEach(() => { + jest.resetModules(); + process.env = { + ...originalEnv, + TAVILY_API_KEY: mockApiKey, + }; + }); + + afterEach(() => { + jest.clearAllMocks(); + process.env = originalEnv; + }); + + it('should throw an 
error if TAVILY_API_KEY is missing', () => { + delete process.env.TAVILY_API_KEY; + expect(() => new TavilySearchResults()).toThrow('Missing TAVILY_API_KEY environment variable.'); + }); + + it('should use mockApiKey when TAVILY_API_KEY is not set in the environment', () => { + const instance = new TavilySearchResults({ + TAVILY_API_KEY: mockApiKey, + }); + expect(instance.apiKey).toBe(mockApiKey); + }); +}); diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js index 2b33751a046..1fdaee90062 100644 --- a/api/cache/getLogStores.js +++ b/api/cache/getLogStores.js @@ -35,11 +35,11 @@ const messages = isEnabled(USE_REDIS) ? new Keyv({ store: keyvRedis, ttl: Time.FIVE_MINUTES }) : new Keyv({ namespace: CacheKeys.MESSAGES, ttl: Time.FIVE_MINUTES }); -const tokenConfig = isEnabled(USE_REDIS) // ttl: 30 minutes +const tokenConfig = isEnabled(USE_REDIS) ? new Keyv({ store: keyvRedis, ttl: Time.THIRTY_MINUTES }) : new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: Time.THIRTY_MINUTES }); -const genTitle = isEnabled(USE_REDIS) // ttl: 2 minutes +const genTitle = isEnabled(USE_REDIS) ? new Keyv({ store: keyvRedis, ttl: Time.TWO_MINUTES }) : new Keyv({ namespace: CacheKeys.GEN_TITLE, ttl: Time.TWO_MINUTES }); @@ -69,6 +69,7 @@ const namespaces = { registrations: createViolationInstance('registrations'), [ViolationTypes.TTS_LIMIT]: createViolationInstance(ViolationTypes.TTS_LIMIT), [ViolationTypes.STT_LIMIT]: createViolationInstance(ViolationTypes.STT_LIMIT), + [ViolationTypes.CONVO_ACCESS]: createViolationInstance(ViolationTypes.CONVO_ACCESS), [ViolationTypes.FILE_UPLOAD_LIMIT]: createViolationInstance(ViolationTypes.FILE_UPLOAD_LIMIT), [ViolationTypes.VERIFY_EMAIL_LIMIT]: createViolationInstance(ViolationTypes.VERIFY_EMAIL_LIMIT), [ViolationTypes.RESET_PASSWORD_LIMIT]: createViolationInstance( diff --git a/api/models/Conversation.js b/api/models/Conversation.js index 93ceac9b77c..19622ba7962 100644 --- a/api/models/Conversation.js +++ b/api/models/Conversation.js @@ -2,6 +2,20 @@ const Conversation = require('./schema/convoSchema'); const { getMessages, deleteMessages } = require('./Message'); const logger = require('~/config/winston'); +/** + * Searches for a conversation by conversationId and returns a lean document with only conversationId and user. + * @param {string} conversationId - The conversation's ID. + * @returns {Promise<{conversationId: string, user: string} | null>} The conversation object with selected fields or null if not found. + */ +const searchConversation = async (conversationId) => { + try { + return await Conversation.findOne({ conversationId }, 'conversationId user').lean(); + } catch (error) { + logger.error('[searchConversation] Error searching conversation', error); + throw new Error('Error searching conversation'); + } +}; + /** * Retrieves a single conversation for a given user and conversation ID. * @param {string} user - The user's ID. @@ -19,6 +33,7 @@ const getConvo = async (user, conversationId) => { module.exports = { Conversation, + searchConversation, /** * Saves a conversation to the database. * @param {Object} req - The request object. 
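Note: the new `searchConversation` helper deliberately returns only `conversationId` and `user` as a lean document, which keeps ownership checks cheap. A minimal usage sketch (hypothetical helper and variable names, not part of this diff; it simply mirrors the check done by the convoAccess middleware added further down):

const { searchConversation } = require('~/models/Conversation');

// Hypothetical ownership check: true if the conversation is unknown (nothing to protect yet)
// or belongs to the given user, false otherwise.
async function userOwnsConversation(userId, conversationId) {
  const convo = await searchConversation(conversationId);
  if (!convo) {
    return true;
  }
  return convo.user === userId;
}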
diff --git a/api/models/ConversationTag.js b/api/models/ConversationTag.js index 9745dce70bd..0293ea83c31 100644 --- a/api/models/ConversationTag.js +++ b/api/models/ConversationTag.js @@ -1,277 +1,242 @@ -//const crypto = require('crypto'); - -const logger = require('~/config/winston'); -const Conversation = require('./schema/convoSchema'); const ConversationTag = require('./schema/conversationTagSchema'); +const Conversation = require('./schema/convoSchema'); +const logger = require('~/config/winston'); -const SAVED_TAG = 'Saved'; - -const updateTagsForConversation = async (user, conversationId, tags) => { +/** + * Retrieves all conversation tags for a user. + * @param {string} user - The user ID. + * @returns {Promise} An array of conversation tags. + */ +const getConversationTags = async (user) => { try { - const conversation = await Conversation.findOne({ user, conversationId }); - if (!conversation) { - return { message: 'Conversation not found' }; - } - - const addedTags = tags.tags.filter((tag) => !conversation.tags.includes(tag)); - const removedTags = conversation.tags.filter((tag) => !tags.tags.includes(tag)); - for (const tag of addedTags) { - await ConversationTag.updateOne({ tag, user }, { $inc: { count: 1 } }, { upsert: true }); - } - for (const tag of removedTags) { - await ConversationTag.updateOne({ tag, user }, { $inc: { count: -1 } }); - } - conversation.tags = tags.tags; - await conversation.save({ timestamps: { updatedAt: false } }); - return conversation.tags; + return await ConversationTag.find({ user }).sort({ position: 1 }).lean(); } catch (error) { - logger.error('[updateTagsToConversation] Error updating tags', error); - return { message: 'Error updating tags' }; + logger.error('[getConversationTags] Error getting conversation tags', error); + throw new Error('Error getting conversation tags'); } }; +/** + * Creates a new conversation tag. + * @param {string} user - The user ID. + * @param {Object} data - The tag data. + * @param {string} data.tag - The tag name. + * @param {string} [data.description] - The tag description. + * @param {boolean} [data.addToConversation] - Whether to add the tag to a conversation. + * @param {string} [data.conversationId] - The conversation ID to add the tag to. + * @returns {Promise} The created tag. + */ const createConversationTag = async (user, data) => { try { - const cTag = await ConversationTag.findOne({ user, tag: data.tag }); - if (cTag) { - return cTag; + const { tag, description, addToConversation, conversationId } = data; + + const existingTag = await ConversationTag.findOne({ user, tag }).lean(); + if (existingTag) { + return existingTag; } - const addToConversation = data.addToConversation && data.conversationId; - const newTag = await ConversationTag.create({ - user, - tag: data.tag, - count: 0, - description: data.description, - position: 1, - }); + const maxPosition = await ConversationTag.findOne({ user }).sort('-position').lean(); + const position = (maxPosition?.position || 0) + 1; - await ConversationTag.updateMany( - { user, position: { $gte: 1 }, _id: { $ne: newTag._id } }, - { $inc: { position: 1 } }, + const newTag = await ConversationTag.findOneAndUpdate( + { tag, user }, + { + tag, + user, + count: addToConversation ? 
1 : 0, + position, + description, + $setOnInsert: { createdAt: new Date() }, + }, + { + new: true, + upsert: true, + lean: true, + }, ); - if (addToConversation) { - const conversation = await Conversation.findOne({ - user, - conversationId: data.conversationId, - }); - if (conversation) { - const tags = [...(conversation.tags || []), data.tag]; - await updateTagsForConversation(user, data.conversationId, { tags }); - } else { - logger.warn('[updateTagsForConversation] Conversation not found', data.conversationId); - } + if (addToConversation && conversationId) { + await Conversation.findOneAndUpdate( + { user, conversationId }, + { $addToSet: { tags: tag } }, + { new: true }, + ); } - return await ConversationTag.findOne({ user, tag: data.tag }); + return newTag; } catch (error) { - logger.error('[createConversationTag] Error updating conversation tag', error); - return { message: 'Error updating conversation tag' }; + logger.error('[createConversationTag] Error creating conversation tag', error); + throw new Error('Error creating conversation tag'); } }; -const replaceOrRemoveTagInConversations = async (user, oldtag, newtag) => { +/** + * Updates an existing conversation tag. + * @param {string} user - The user ID. + * @param {string} oldTag - The current tag name. + * @param {Object} data - The updated tag data. + * @param {string} [data.tag] - The new tag name. + * @param {string} [data.description] - The updated description. + * @param {number} [data.position] - The new position. + * @returns {Promise} The updated tag. + */ +const updateConversationTag = async (user, oldTag, data) => { try { - const conversations = await Conversation.find({ user, tags: { $in: [oldtag] } }); - for (const conversation of conversations) { - if (newtag && newtag !== '') { - conversation.tags = conversation.tags.map((tag) => (tag === oldtag ? 
newtag : tag)); - } else { - conversation.tags = conversation.tags.filter((tag) => tag !== oldtag); - } - await conversation.save({ timestamps: { updatedAt: false } }); - } - } catch (error) { - logger.error('[replaceOrRemoveTagInConversations] Error updating conversation tags', error); - return { message: 'Error updating conversation tags' }; - } -}; + const { tag: newTag, description, position } = data; -const updateTagPosition = async (user, tag, newPosition) => { - try { - const cTag = await ConversationTag.findOne({ user, tag }); - if (!cTag) { - return { message: 'Tag not found' }; + const existingTag = await ConversationTag.findOne({ user, tag: oldTag }).lean(); + if (!existingTag) { + return null; } - const oldPosition = cTag.position; + if (newTag && newTag !== oldTag) { + const tagAlreadyExists = await ConversationTag.findOne({ user, tag: newTag }).lean(); + if (tagAlreadyExists) { + throw new Error('Tag already exists'); + } - if (newPosition === oldPosition) { - return cTag; + await Conversation.updateMany({ user, tags: oldTag }, { $set: { 'tags.$': newTag } }); } - const updateOperations = []; - - if (newPosition > oldPosition) { - // Move other tags up - updateOperations.push({ - updateMany: { - filter: { - user, - position: { $gt: oldPosition, $lte: newPosition }, - tag: { $ne: SAVED_TAG }, - }, - update: { $inc: { position: -1 } }, - }, - }); - } else { - // Move other tags down - updateOperations.push({ - updateMany: { - filter: { - user, - position: { $gte: newPosition, $lt: oldPosition }, - tag: { $ne: SAVED_TAG }, - }, - update: { $inc: { position: 1 } }, - }, - }); + const updateData = {}; + if (newTag) { + updateData.tag = newTag; + } + if (description !== undefined) { + updateData.description = description; + } + if (position !== undefined) { + await adjustPositions(user, existingTag.position, position); + updateData.position = position; } - // Update the target tag's position - updateOperations.push({ - updateOne: { - filter: { _id: cTag._id }, - update: { $set: { position: newPosition } }, - }, + return await ConversationTag.findOneAndUpdate({ user, tag: oldTag }, updateData, { + new: true, + lean: true, }); - - await ConversationTag.bulkWrite(updateOperations); - - return await ConversationTag.findById(cTag._id); } catch (error) { - logger.error('[updateTagPosition] Error updating tag position', error); - return { message: 'Error updating tag position' }; + logger.error('[updateConversationTag] Error updating conversation tag', error); + throw new Error('Error updating conversation tag'); } }; -module.exports = { - SAVED_TAG, - ConversationTag, - getConversationTags: async (user) => { - try { - const cTags = await ConversationTag.find({ user }).sort({ position: 1 }).lean(); - cTags.sort((a, b) => { - if (a.tag === SAVED_TAG) { - return -1; - } - if (b.tag === SAVED_TAG) { - return 1; - } - return 0; - }); +/** + * Adjusts positions of tags when a tag's position is changed. + * @param {string} user - The user ID. + * @param {number} oldPosition - The old position of the tag. + * @param {number} newPosition - The new position of the tag. + * @returns {Promise} + */ +const adjustPositions = async (user, oldPosition, newPosition) => { + if (oldPosition === newPosition) { + return; + } + + const update = oldPosition < newPosition ? 
{ $inc: { position: -1 } } : { $inc: { position: 1 } }; + + await ConversationTag.updateMany( + { + user, + position: { + $gt: Math.min(oldPosition, newPosition), + $lte: Math.max(oldPosition, newPosition), + }, + }, + update, + ); +}; - return cTags; - } catch (error) { - logger.error('[getShare] Error getting share link', error); - return { message: 'Error getting share link' }; +/** + * Deletes a conversation tag. + * @param {string} user - The user ID. + * @param {string} tag - The tag to delete. + * @returns {Promise} The deleted tag. + */ +const deleteConversationTag = async (user, tag) => { + try { + const deletedTag = await ConversationTag.findOneAndDelete({ user, tag }).lean(); + if (!deletedTag) { + return null; } - }, - createConversationTag, - updateConversationTag: async (user, tag, data) => { - try { - const cTag = await ConversationTag.findOne({ user, tag }); - if (!cTag) { - return createConversationTag(user, data); - } + await Conversation.updateMany({ user, tags: tag }, { $pull: { tags: tag } }); - if (cTag.tag !== data.tag || cTag.description !== data.description) { - cTag.tag = data.tag; - cTag.description = data.description === undefined ? cTag.description : data.description; - await cTag.save(); - } + await ConversationTag.updateMany( + { user, position: { $gt: deletedTag.position } }, + { $inc: { position: -1 } }, + ); - if (data.position !== undefined && cTag.position !== data.position) { - await updateTagPosition(user, tag, data.position); - } + return deletedTag; + } catch (error) { + logger.error('[deleteConversationTag] Error deleting conversation tag', error); + throw new Error('Error deleting conversation tag'); + } +}; - // update conversation tags properties - replaceOrRemoveTagInConversations(user, tag, data.tag); - return await ConversationTag.findOne({ user, tag: data.tag }); - } catch (error) { - logger.error('[updateConversationTag] Error updating conversation tag', error); - return { message: 'Error updating conversation tag' }; +/** + * Updates tags for a specific conversation. + * @param {string} user - The user ID. + * @param {string} conversationId - The conversation ID. + * @param {string[]} tags - The new set of tags for the conversation. + * @returns {Promise} The updated list of tags for the conversation. 
+ */ +const updateTagsForConversation = async (user, conversationId, tags) => { + try { + const conversation = await Conversation.findOne({ user, conversationId }).lean(); + if (!conversation) { + throw new Error('Conversation not found'); } - }, - deleteConversationTag: async (user, tag) => { - try { - const currentTag = await ConversationTag.findOne({ user, tag }); - if (!currentTag) { - return; - } + const oldTags = new Set(conversation.tags); + const newTags = new Set(tags); - await currentTag.deleteOne({ user, tag }); + const addedTags = [...newTags].filter((tag) => !oldTags.has(tag)); + const removedTags = [...oldTags].filter((tag) => !newTags.has(tag)); - await replaceOrRemoveTagInConversations(user, tag, null); - return currentTag; - } catch (error) { - logger.error('[deleteConversationTag] Error deleting conversation tag', error); - return { message: 'Error deleting conversation tag' }; - } - }, + const bulkOps = []; - updateTagsForConversation, - rebuildConversationTags: async (user) => { - try { - const conversations = await Conversation.find({ user }).select('tags'); - const tagCountMap = {}; - - // Count the occurrences of each tag - conversations.forEach((conversation) => { - conversation.tags.forEach((tag) => { - if (tagCountMap[tag]) { - tagCountMap[tag]++; - } else { - tagCountMap[tag] = 1; - } - }); + for (const tag of addedTags) { + bulkOps.push({ + updateOne: { + filter: { user, tag }, + update: { $inc: { count: 1 } }, + upsert: true, + }, }); + } - const tags = await ConversationTag.find({ user }).sort({ position: -1 }); - - // Update existing tags and add new tags - for (const [tag, count] of Object.entries(tagCountMap)) { - const existingTag = tags.find((t) => t.tag === tag); - if (existingTag) { - existingTag.count = count; - await existingTag.save(); - } else { - const newTag = new ConversationTag({ user, tag, count }); - tags.push(newTag); - await newTag.save(); - } - } + for (const tag of removedTags) { + bulkOps.push({ + updateOne: { + filter: { user, tag }, + update: { $inc: { count: -1 } }, + }, + }); + } - // Set count to 0 for tags that are not in the grouped tags - for (const tag of tags) { - if (!tagCountMap[tag.tag]) { - tag.count = 0; - await tag.save(); - } - } + if (bulkOps.length > 0) { + await ConversationTag.bulkWrite(bulkOps); + } - // Sort tags by position in descending order - tags.sort((a, b) => a.position - b.position); + const updatedConversation = ( + await Conversation.findOneAndUpdate( + { user, conversationId }, + { $set: { tags: [...newTags] } }, + { new: true }, + ) + ).toObject(); - // Move the tag with name "saved" to the first position - const savedTagIndex = tags.findIndex((tag) => tag.tag === SAVED_TAG); - if (savedTagIndex !== -1) { - const [savedTag] = tags.splice(savedTagIndex, 1); - tags.unshift(savedTag); - } + return updatedConversation.tags; + } catch (error) { + logger.error('[updateTagsForConversation] Error updating tags', error); + throw new Error('Error updating tags for conversation'); + } +}; - // Reassign positions starting from 0 - tags.forEach((tag, index) => { - tag.position = index; - tag.save(); - }); - return tags; - } catch (error) { - logger.error('[rearrangeTags] Error rearranging tags', error); - return { message: 'Error rearranging tags' }; - } - }, +module.exports = { + getConversationTags, + createConversationTag, + updateConversationTag, + deleteConversationTag, + updateTagsForConversation, }; diff --git a/api/models/schema/convoSchema.js b/api/models/schema/convoSchema.js index 92f5589b13e..7b020e33097 
100644 --- a/api/models/schema/convoSchema.js +++ b/api/models/schema/convoSchema.js @@ -61,6 +61,7 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) { } convoSchema.index({ createdAt: 1, updatedAt: 1 }); +convoSchema.index({ conversationId: 1, user: 1 }, { unique: true }); const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema); diff --git a/api/models/tx.js b/api/models/tx.js index 778d2ce048b..3018de41745 100644 --- a/api/models/tx.js +++ b/api/models/tx.js @@ -1,38 +1,74 @@ const { matchModelName } = require('../utils'); const defaultRate = 6; +/** AWS Bedrock pricing */ +const bedrockValues = { + 'anthropic.claude-3-haiku-20240307-v1:0': { prompt: 0.25, completion: 1.25 }, + 'anthropic.claude-3-sonnet-20240229-v1:0': { prompt: 3.0, completion: 15.0 }, + 'anthropic.claude-3-opus-20240229-v1:0': { prompt: 15.0, completion: 75.0 }, + 'anthropic.claude-3-5-sonnet-20240620-v1:0': { prompt: 3.0, completion: 15.0 }, + 'anthropic.claude-v2:1': { prompt: 8.0, completion: 24.0 }, + 'anthropic.claude-instant-v1': { prompt: 0.8, completion: 2.4 }, + 'meta.llama2-13b-chat-v1': { prompt: 0.75, completion: 1.0 }, + 'meta.llama2-70b-chat-v1': { prompt: 1.95, completion: 2.56 }, + 'meta.llama3-8b-instruct-v1:0': { prompt: 0.3, completion: 0.6 }, + 'meta.llama3-70b-instruct-v1:0': { prompt: 2.65, completion: 3.5 }, + 'meta.llama3-1-8b-instruct-v1:0': { prompt: 0.3, completion: 0.6 }, + 'meta.llama3-1-70b-instruct-v1:0': { prompt: 2.65, completion: 3.5 }, + 'meta.llama3-1-405b-instruct-v1:0': { prompt: 5.32, completion: 16.0 }, + 'mistral.mistral-7b-instruct-v0:2': { prompt: 0.15, completion: 0.2 }, + 'mistral.mistral-small-2402-v1:0': { prompt: 0.15, completion: 0.2 }, + 'mistral.mixtral-8x7b-instruct-v0:1': { prompt: 0.45, completion: 0.7 }, + 'mistral.mistral-large-2402-v1:0': { prompt: 4.0, completion: 12.0 }, + 'mistral.mistral-large-2407-v1:0': { prompt: 3.0, completion: 9.0 }, + 'cohere.command-text-v14': { prompt: 1.5, completion: 2.0 }, + 'cohere.command-light-text-v14': { prompt: 0.3, completion: 0.6 }, + 'cohere.command-r-v1:0': { prompt: 0.5, completion: 1.5 }, + 'cohere.command-r-plus-v1:0': { prompt: 3.0, completion: 15.0 }, + 'ai21.j2-mid-v1': { prompt: 12.5, completion: 12.5 }, + 'ai21.j2-ultra-v1': { prompt: 18.8, completion: 18.8 }, + 'amazon.titan-text-lite-v1': { prompt: 0.15, completion: 0.2 }, + 'amazon.titan-text-express-v1': { prompt: 0.2, completion: 0.6 }, +}; + +for (const [key, value] of Object.entries(bedrockValues)) { + bedrockValues[`bedrock/${key}`] = value; +} + /** * Mapping of model token sizes to their respective multipliers for prompt and completion. * The rates are 1 USD per 1M tokens. 
* @type {Object.} */ -const tokenValues = { - '8k': { prompt: 30, completion: 60 }, - '32k': { prompt: 60, completion: 120 }, - '4k': { prompt: 1.5, completion: 2 }, - '16k': { prompt: 3, completion: 4 }, - 'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 }, - 'gpt-4o-mini': { prompt: 0.15, completion: 0.6 }, - 'gpt-4o': { prompt: 5, completion: 15 }, - 'gpt-4-1106': { prompt: 10, completion: 30 }, - 'gpt-3.5-turbo-0125': { prompt: 0.5, completion: 1.5 }, - 'claude-3-opus': { prompt: 15, completion: 75 }, - 'claude-3-sonnet': { prompt: 3, completion: 15 }, - 'claude-3-5-sonnet': { prompt: 3, completion: 15 }, - 'claude-3-haiku': { prompt: 0.25, completion: 1.25 }, - 'claude-2.1': { prompt: 8, completion: 24 }, - 'claude-2': { prompt: 8, completion: 24 }, - 'claude-': { prompt: 0.8, completion: 2.4 }, - 'command-r-plus': { prompt: 3, completion: 15 }, - 'command-r': { prompt: 0.5, completion: 1.5 }, - /* cohere doesn't have rates for the older command models, +const tokenValues = Object.assign( + { + '8k': { prompt: 30, completion: 60 }, + '32k': { prompt: 60, completion: 120 }, + '4k': { prompt: 1.5, completion: 2 }, + '16k': { prompt: 3, completion: 4 }, + 'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 }, + 'gpt-4o-2024-08-06': { prompt: 2.5, completion: 10 }, + 'gpt-4o-mini': { prompt: 0.15, completion: 0.6 }, + 'gpt-4o': { prompt: 5, completion: 15 }, + 'gpt-4-1106': { prompt: 10, completion: 30 }, + 'gpt-3.5-turbo-0125': { prompt: 0.5, completion: 1.5 }, + 'claude-3-opus': { prompt: 15, completion: 75 }, + 'claude-3-sonnet': { prompt: 3, completion: 15 }, + 'claude-3-5-sonnet': { prompt: 3, completion: 15 }, + 'claude-3-haiku': { prompt: 0.25, completion: 1.25 }, + 'claude-2.1': { prompt: 8, completion: 24 }, + 'claude-2': { prompt: 8, completion: 24 }, + 'claude-': { prompt: 0.8, completion: 2.4 }, + 'command-r-plus': { prompt: 3, completion: 15 }, + 'command-r': { prompt: 0.5, completion: 1.5 }, + /* cohere doesn't have rates for the older command models, so this was from https://artificialanalysis.ai/models/command-light/providers */ - command: { prompt: 0.38, completion: 0.38 }, - // 'gemini-1.5': { prompt: 7, completion: 21 }, // May 2nd, 2024 pricing - // 'gemini': { prompt: 0.5, completion: 1.5 }, // May 2nd, 2024 pricing - 'gemini-1.5': { prompt: 0, completion: 0 }, // currently free - gemini: { prompt: 0, completion: 0 }, // currently free -}; + command: { prompt: 0.38, completion: 0.38 }, + 'gemini-1.5': { prompt: 7, completion: 21 }, // May 2nd, 2024 pricing + gemini: { prompt: 0.5, completion: 1.5 }, // May 2nd, 2024 pricing + }, + bedrockValues, +); /** * Retrieves the key associated with a given model name. 
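For clarity on the units in the map above (rates are USD per 1M tokens), a purely illustrative calculation using the 'gpt-4o' entry shown in this hunk (prompt 5, completion 15); the token counts are made up:

// Illustrative only: cost of one hypothetical 'gpt-4o' call
const promptTokens = 1000;
const completionTokens = 500;
const costUSD = (promptTokens / 1e6) * 5 + (completionTokens / 1e6) * 15; // 0.005 + 0.0075 = 0.0125 USD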
@@ -55,6 +91,8 @@ const getValueKey = (model, endpoint) => { return 'gpt-3.5-turbo-1106'; } else if (modelName.includes('gpt-3.5')) { return '4k'; + } else if (modelName.includes('gpt-4o-2024-08-06')) { + return 'gpt-4o-2024-08-06'; } else if (modelName.includes('gpt-4o-mini')) { return 'gpt-4o-mini'; } else if (modelName.includes('gpt-4o')) { diff --git a/api/models/tx.spec.js b/api/models/tx.spec.js index d59a79a33e7..bcb63d5a255 100644 --- a/api/models/tx.spec.js +++ b/api/models/tx.spec.js @@ -53,6 +53,14 @@ describe('getValueKey', () => { expect(getValueKey('gpt-4o-mini-2024-07-18')).toBe('gpt-4o-mini'); expect(getValueKey('openai/gpt-4o-mini')).toBe('gpt-4o-mini'); expect(getValueKey('gpt-4o-mini-0718')).toBe('gpt-4o-mini'); + expect(getValueKey('gpt-4o-2024-08-06-0718')).not.toBe('gpt-4o'); + }); + + it('should return "gpt-4o-2024-08-06" for model type of "gpt-4o-2024-08-06"', () => { + expect(getValueKey('gpt-4o-2024-08-06-2024-07-18')).toBe('gpt-4o-2024-08-06'); + expect(getValueKey('openai/gpt-4o-2024-08-06')).toBe('gpt-4o-2024-08-06'); + expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o-2024-08-06'); + expect(getValueKey('gpt-4o-2024-08-06-0718')).not.toBe('gpt-4o'); }); it('should return "claude-3-5-sonnet" for model type of "claude-3-5-sonnet-"', () => { @@ -152,3 +160,68 @@ describe('getMultiplier', () => { ); }); }); + +describe('AWS Bedrock Model Tests', () => { + const awsModels = [ + 'anthropic.claude-3-haiku-20240307-v1:0', + 'anthropic.claude-3-sonnet-20240229-v1:0', + 'anthropic.claude-3-opus-20240229-v1:0', + 'anthropic.claude-3-5-sonnet-20240620-v1:0', + 'anthropic.claude-v2:1', + 'anthropic.claude-instant-v1', + 'meta.llama2-13b-chat-v1', + 'meta.llama2-70b-chat-v1', + 'meta.llama3-8b-instruct-v1:0', + 'meta.llama3-70b-instruct-v1:0', + 'meta.llama3-1-8b-instruct-v1:0', + 'meta.llama3-1-70b-instruct-v1:0', + 'meta.llama3-1-405b-instruct-v1:0', + 'mistral.mistral-7b-instruct-v0:2', + 'mistral.mistral-small-2402-v1:0', + 'mistral.mixtral-8x7b-instruct-v0:1', + 'mistral.mistral-large-2402-v1:0', + 'mistral.mistral-large-2407-v1:0', + 'cohere.command-text-v14', + 'cohere.command-light-text-v14', + 'cohere.command-r-v1:0', + 'cohere.command-r-plus-v1:0', + 'ai21.j2-mid-v1', + 'ai21.j2-ultra-v1', + 'amazon.titan-text-lite-v1', + 'amazon.titan-text-express-v1', + ]; + + it('should return the correct prompt multipliers for all models', () => { + const results = awsModels.map((model) => { + const multiplier = getMultiplier({ valueKey: model, tokenType: 'prompt' }); + return multiplier === tokenValues[model].prompt; + }); + expect(results.every(Boolean)).toBe(true); + }); + + it('should return the correct completion multipliers for all models', () => { + const results = awsModels.map((model) => { + const multiplier = getMultiplier({ valueKey: model, tokenType: 'completion' }); + return multiplier === tokenValues[model].completion; + }); + expect(results.every(Boolean)).toBe(true); + }); + + it('should return the correct prompt multipliers for all models with Bedrock prefix', () => { + const results = awsModels.map((model) => { + const modelName = `bedrock/${model}`; + const multiplier = getMultiplier({ valueKey: modelName, tokenType: 'prompt' }); + return multiplier === tokenValues[model].prompt; + }); + expect(results.every(Boolean)).toBe(true); + }); + + it('should return the correct completion multipliers for all models with Bedrock prefix', () => { + const results = awsModels.map((model) => { + const modelName = `bedrock/${model}`; + const multiplier = 
getMultiplier({ valueKey: modelName, tokenType: 'completion' }); + return multiplier === tokenValues[model].completion; + }); + expect(results.every(Boolean)).toBe(true); + }); +}); diff --git a/api/package.json b/api/package.json index 75502baf28c..1b42197cdc6 100644 --- a/api/package.json +++ b/api/package.json @@ -1,6 +1,6 @@ { "name": "@librechat/backend", - "version": "0.7.4-rc1", + "version": "0.7.4", "description": "", "scripts": { "start": "echo 'please run this from the root directory'", diff --git a/api/server/middleware/concurrentLimiter.js b/api/server/middleware/concurrentLimiter.js index 402152eb029..58ff689a0b1 100644 --- a/api/server/middleware/concurrentLimiter.js +++ b/api/server/middleware/concurrentLimiter.js @@ -1,5 +1,7 @@ -const clearPendingReq = require('../../cache/clearPendingReq'); -const { logViolation, getLogStores } = require('../../cache'); +const { Time } = require('librechat-data-provider'); +const clearPendingReq = require('~/cache/clearPendingReq'); +const { logViolation, getLogStores } = require('~/cache'); +const { isEnabled } = require('~/server/utils'); const denyRequest = require('./denyRequest'); const { @@ -7,7 +9,6 @@ const { CONCURRENT_MESSAGE_MAX = 1, CONCURRENT_VIOLATION_SCORE: score, } = process.env ?? {}; -const ttl = 1000 * 60 * 1; /** * Middleware to limit concurrent requests for a user. @@ -38,7 +39,7 @@ const concurrentLimiter = async (req, res, next) => { const limit = Math.max(CONCURRENT_MESSAGE_MAX, 1); const type = 'concurrent'; - const key = `${USE_REDIS ? namespace : ''}:${userId}`; + const key = `${isEnabled(USE_REDIS) ? namespace : ''}:${userId}`; const pendingRequests = +((await cache.get(key)) ?? 0); if (pendingRequests >= limit) { @@ -51,7 +52,7 @@ const concurrentLimiter = async (req, res, next) => { await logViolation(req, res, type, errorMessage, score); return await denyRequest(req, res, errorMessage); } else { - await cache.set(key, pendingRequests + 1, ttl); + await cache.set(key, pendingRequests + 1, Time.ONE_MINUTE); } // Ensure the requests are removed from the store once the request is done diff --git a/api/server/middleware/index.js b/api/server/middleware/index.js index 75aab961b59..8d3fff58ff0 100644 --- a/api/server/middleware/index.js +++ b/api/server/middleware/index.js @@ -14,6 +14,7 @@ const requireJwtAuth = require('./requireJwtAuth'); const validateModel = require('./validateModel'); const moderateText = require('./moderateText'); const setHeaders = require('./setHeaders'); +const validate = require('./validate'); const limiters = require('./limiters'); const uaParser = require('./uaParser'); const checkBan = require('./checkBan'); @@ -22,6 +23,7 @@ const roles = require('./roles'); module.exports = { ...abortMiddleware, + ...validate, ...limiters, ...roles, noIndex, diff --git a/api/server/middleware/validate/convoAccess.js b/api/server/middleware/validate/convoAccess.js new file mode 100644 index 00000000000..43cca0097d3 --- /dev/null +++ b/api/server/middleware/validate/convoAccess.js @@ -0,0 +1,73 @@ +const { Constants, ViolationTypes, Time } = require('librechat-data-provider'); +const { searchConversation } = require('~/models/Conversation'); +const denyRequest = require('~/server/middleware/denyRequest'); +const { logViolation, getLogStores } = require('~/cache'); +const { isEnabled } = require('~/server/utils'); + +const { USE_REDIS, CONVO_ACCESS_VIOLATION_SCORE: score = 0 } = process.env ?? {}; + +/** + * Middleware to validate user's authorization for a conversation. 
+ * + * This middleware checks if a user has the right to access a specific conversation. + * If the user doesn't have access, an error is returned. If the conversation doesn't exist, + * a not found error is returned. If the access is valid, the middleware allows the request to proceed. + * If the `cache` store is not available, the middleware will skip its logic. + * + * @function + * @param {Express.Request} req - Express request object containing user information. + * @param {Express.Response} res - Express response object. + * @param {function} next - Express next middleware function. + * @throws {Error} Throws an error if the user doesn't have access to the conversation. + */ +const validateConvoAccess = async (req, res, next) => { + const namespace = ViolationTypes.CONVO_ACCESS; + const cache = getLogStores(namespace); + + const conversationId = req.body.conversationId; + + if (!conversationId || conversationId === Constants.NEW_CONVO) { + return next(); + } + + const userId = req.user?.id ?? req.user?._id ?? ''; + const type = ViolationTypes.CONVO_ACCESS; + const key = `${isEnabled(USE_REDIS) ? namespace : ''}:${userId}:${conversationId}`; + + try { + if (cache) { + const cachedAccess = await cache.get(key); + if (cachedAccess === 'authorized') { + return next(); + } + } + + const conversation = await searchConversation(conversationId); + + if (!conversation) { + return next(); + } + + if (conversation.user !== userId) { + const errorMessage = { + type, + error: 'User not authorized for this conversation', + }; + + if (cache) { + await logViolation(req, res, type, errorMessage, score); + } + return await denyRequest(req, res, errorMessage); + } + + if (cache) { + await cache.set(key, 'authorized', Time.TEN_MINUTES); + } + next(); + } catch (error) { + console.error('Error validating conversation access:', error); + res.status(500).json({ error: 'Internal server error' }); + } +}; + +module.exports = validateConvoAccess; diff --git a/api/server/middleware/validate/index.js b/api/server/middleware/validate/index.js new file mode 100644 index 00000000000..ce476e747f3 --- /dev/null +++ b/api/server/middleware/validate/index.js @@ -0,0 +1,4 @@ +const validateConvoAccess = require('./convoAccess'); +module.exports = { + validateConvoAccess, +}; diff --git a/api/server/routes/ask/index.js b/api/server/routes/ask/index.js index b5156ed8d10..fb737d3a746 100644 --- a/api/server/routes/ask/index.js +++ b/api/server/routes/ask/index.js @@ -12,9 +12,10 @@ const { uaParser, checkBan, requireJwtAuth, - concurrentLimiter, messageIpLimiter, + concurrentLimiter, messageUserLimiter, + validateConvoAccess, } = require('~/server/middleware'); const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? 
{}; @@ -37,6 +38,8 @@ if (isEnabled(LIMIT_MESSAGE_USER)) { router.use(messageUserLimiter); } +router.use(validateConvoAccess); + router.use([`/${EModelEndpoint.azureOpenAI}`, `/${EModelEndpoint.openAI}`], openAI); router.use(`/${EModelEndpoint.chatGPTBrowser}`, askChatGPTBrowser); router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins); diff --git a/api/server/routes/assistants/chatV1.js b/api/server/routes/assistants/chatV1.js index 13386c6c85c..36ed6d49e09 100644 --- a/api/server/routes/assistants/chatV1.js +++ b/api/server/routes/assistants/chatV1.js @@ -8,6 +8,7 @@ const { // validateEndpoint, buildEndpointOption, } = require('~/server/middleware'); +const validateConvoAccess = require('~/server/middleware/validate/convoAccess'); const validateAssistant = require('~/server/middleware/assistants/validate'); const chatController = require('~/server/controllers/assistants/chatV1'); @@ -21,6 +22,14 @@ router.post('/abort', handleAbort()); * @param {express.Response} res - The response object, used to send back a response. * @returns {void} */ -router.post('/', validateModel, buildEndpointOption, validateAssistant, setHeaders, chatController); +router.post( + '/', + validateModel, + buildEndpointOption, + validateAssistant, + validateConvoAccess, + setHeaders, + chatController, +); module.exports = router; diff --git a/api/server/routes/assistants/chatV2.js b/api/server/routes/assistants/chatV2.js index 36c29f4bc02..e50994e9bc5 100644 --- a/api/server/routes/assistants/chatV2.js +++ b/api/server/routes/assistants/chatV2.js @@ -8,6 +8,7 @@ const { // validateEndpoint, buildEndpointOption, } = require('~/server/middleware'); +const validateConvoAccess = require('~/server/middleware/validate/convoAccess'); const validateAssistant = require('~/server/middleware/assistants/validate'); const chatController = require('~/server/controllers/assistants/chatV2'); @@ -21,6 +22,14 @@ router.post('/abort', handleAbort()); * @param {express.Response} res - The response object, used to send back a response. 
* @returns {void} */ -router.post('/', validateModel, buildEndpointOption, validateAssistant, setHeaders, chatController); +router.post( + '/', + validateModel, + buildEndpointOption, + validateAssistant, + validateConvoAccess, + setHeaders, + chatController, +); module.exports = router; diff --git a/api/server/routes/assistants/index.js b/api/server/routes/assistants/index.js index 6613177e7ba..9640b37b39c 100644 --- a/api/server/routes/assistants/index.js +++ b/api/server/routes/assistants/index.js @@ -1,13 +1,6 @@ const express = require('express'); const router = express.Router(); -const { - uaParser, - checkBan, - requireJwtAuth, - // concurrentLimiter, - // messageIpLimiter, - // messageUserLimiter, -} = require('~/server/middleware'); +const { uaParser, checkBan, requireJwtAuth } = require('~/server/middleware'); const v1 = require('./v1'); const chatV1 = require('./chatV1'); diff --git a/api/server/routes/convos.js b/api/server/routes/convos.js index 8757ce76eae..14db4755685 100644 --- a/api/server/routes/convos.js +++ b/api/server/routes/convos.js @@ -175,8 +175,17 @@ router.post('/fork', async (req, res) => { }); router.put('/tags/:conversationId', async (req, res) => { - const tag = await updateTagsForConversation(req.user.id, req.params.conversationId, req.body); - res.status(200).json(tag); + try { + const conversationTags = await updateTagsForConversation( + req.user.id, + req.params.conversationId, + req.body.tags, + ); + res.status(200).json(conversationTags); + } catch (error) { + logger.error('Error updating conversation tags', error); + res.status(500).send('Error updating conversation tags'); + } }); module.exports = router; diff --git a/api/server/routes/edit/index.js b/api/server/routes/edit/index.js index fa19f9effdc..f1d47af3f99 100644 --- a/api/server/routes/edit/index.js +++ b/api/server/routes/edit/index.js @@ -13,6 +13,7 @@ const { messageIpLimiter, concurrentLimiter, messageUserLimiter, + validateConvoAccess, } = require('~/server/middleware'); const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? 
{}; @@ -35,6 +36,8 @@ if (isEnabled(LIMIT_MESSAGE_USER)) { router.use(messageUserLimiter); } +router.use(validateConvoAccess); + router.use([`/${EModelEndpoint.azureOpenAI}`, `/${EModelEndpoint.openAI}`], openAI); router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins); router.use(`/${EModelEndpoint.anthropic}`, anthropic); diff --git a/api/server/routes/files/files.js b/api/server/routes/files/files.js index 565893af3dc..1804244ae5a 100644 --- a/api/server/routes/files/files.js +++ b/api/server/routes/files/files.js @@ -1,13 +1,18 @@ const fs = require('fs').promises; const express = require('express'); -const { isUUID, checkOpenAIStorage } = require('librechat-data-provider'); +const { + isUUID, + checkOpenAIStorage, + FileSources, + EModelEndpoint, +} = require('librechat-data-provider'); const { filterFile, processFileUpload, processDeleteRequest, } = require('~/server/services/Files/process'); -const { initializeClient } = require('~/server/services/Endpoints/assistants'); const { getStrategyFunctions } = require('~/server/services/Files/strategies'); +const { getOpenAIClient } = require('~/server/controllers/assistants/helpers'); const { getFiles } = require('~/models/File'); const { logger } = require('~/config'); @@ -113,7 +118,15 @@ router.get('/download/:userId/:file_id', async (req, res) => { if (checkOpenAIStorage(file.source)) { req.body = { model: file.model }; - const { openai } = await initializeClient({ req, res }); + const endpointMap = { + [FileSources.openai]: EModelEndpoint.assistants, + [FileSources.azure]: EModelEndpoint.azureAssistants, + }; + const { openai } = await getOpenAIClient({ + req, + res, + overrideEndpoint: endpointMap[file.source], + }); logger.debug(`Downloading file ${file_id} from OpenAI`); passThrough = await getDownloadStream(file_id, openai); setHeaders(); diff --git a/api/server/routes/tags.js b/api/server/routes/tags.js index 0d4d85e3eef..289ee5c8f8a 100644 --- a/api/server/routes/tags.js +++ b/api/server/routes/tags.js @@ -1,44 +1,88 @@ const express = require('express'); - const { getConversationTags, updateConversationTag, createConversationTag, deleteConversationTag, - rebuildConversationTags, } = require('~/models/ConversationTag'); const requireJwtAuth = require('~/server/middleware/requireJwtAuth'); const router = express.Router(); router.use(requireJwtAuth); +/** + * GET / + * Retrieves all conversation tags for the authenticated user. + * @param {Object} req - Express request object + * @param {Object} res - Express response object + */ router.get('/', async (req, res) => { - const tags = await getConversationTags(req.user.id); - - if (tags) { - res.status(200).json(tags); - } else { - res.status(404).end(); + try { + const tags = await getConversationTags(req.user.id); + if (tags) { + res.status(200).json(tags); + } else { + res.status(404).end(); + } + } catch (error) { + console.error('Error getting conversation tags:', error); + res.status(500).json({ error: 'Internal server error' }); } }); +/** + * POST / + * Creates a new conversation tag for the authenticated user. 
+ * @param {Object} req - Express request object + * @param {Object} res - Express response object + */ router.post('/', async (req, res) => { - const tag = await createConversationTag(req.user.id, req.body); - res.status(200).json(tag); -}); - -router.post('/rebuild', async (req, res) => { - const tag = await rebuildConversationTags(req.user.id); - res.status(200).json(tag); + try { + const tag = await createConversationTag(req.user.id, req.body); + res.status(200).json(tag); + } catch (error) { + console.error('Error creating conversation tag:', error); + res.status(500).json({ error: 'Internal server error' }); + } }); +/** + * PUT /:tag + * Updates an existing conversation tag for the authenticated user. + * @param {Object} req - Express request object + * @param {Object} res - Express response object + */ router.put('/:tag', async (req, res) => { - const tag = await updateConversationTag(req.user.id, req.params.tag, req.body); - res.status(200).json(tag); + try { + const tag = await updateConversationTag(req.user.id, req.params.tag, req.body); + if (tag) { + res.status(200).json(tag); + } else { + res.status(404).json({ error: 'Tag not found' }); + } + } catch (error) { + console.error('Error updating conversation tag:', error); + res.status(500).json({ error: 'Internal server error' }); + } }); +/** + * DELETE /:tag + * Deletes a conversation tag for the authenticated user. + * @param {Object} req - Express request object + * @param {Object} res - Express response object + */ router.delete('/:tag', async (req, res) => { - const tag = await deleteConversationTag(req.user.id, req.params.tag); - res.status(200).json(tag); + try { + const tag = await deleteConversationTag(req.user.id, req.params.tag); + if (tag) { + res.status(200).json(tag); + } else { + res.status(404).json({ error: 'Tag not found' }); + } + } catch (error) { + console.error('Error deleting conversation tag:', error); + res.status(500).json({ error: 'Internal server error' }); + } }); module.exports = router; diff --git a/api/server/services/AuthService.js b/api/server/services/AuthService.js index 9664a7e67c9..eb2dd63e6be 100644 --- a/api/server/services/AuthService.js +++ b/api/server/services/AuthService.js @@ -1,4 +1,5 @@ const bcrypt = require('bcryptjs'); +const { webcrypto } = require('node:crypto'); const { SystemRoles, errorsToString } = require('librechat-data-provider'); const { findUser, @@ -53,14 +54,23 @@ const logoutUser = async (userId, refreshToken) => { } }; +/** + * Creates Token and corresponding Hash for verification + * @returns {[string, string]} + */ +const createTokenHash = () => { + const token = Buffer.from(webcrypto.getRandomValues(new Uint8Array(32))).toString('hex'); + const hash = bcrypt.hashSync(token, 10); + return [token, hash]; +}; + /** * Send Verification Email * @param {Partial & { _id: ObjectId, email: string, name: string}} user * @returns {Promise} */ const sendVerificationEmail = async (user) => { - let verifyToken = crypto.randomBytes(32).toString('hex'); - const hash = bcrypt.hashSync(verifyToken, 10); + const [verifyToken, hash] = createTokenHash(); const verificationLink = `${ domains.client @@ -226,8 +236,7 @@ const requestPasswordReset = async (req) => { await token.deleteOne(); } - let resetToken = crypto.randomBytes(32).toString('hex'); - const hash = bcrypt.hashSync(resetToken, 10); + const [resetToken, hash] = createTokenHash(); await new Token({ userId: user._id, @@ -365,8 +374,7 @@ const resendVerificationEmail = async (req) => { return { status: 200, message: 
genericVerificationMessage }; } - let verifyToken = crypto.randomBytes(32).toString('hex'); - const hash = bcrypt.hashSync(verifyToken, 10); + const [verifyToken, hash] = createTokenHash(); const verificationLink = `${ domains.client diff --git a/api/server/services/Files/Audio/streamAudio.js b/api/server/services/Files/Audio/streamAudio.js index eb8134e9589..4d1157bd349 100644 --- a/api/server/services/Files/Audio/streamAudio.js +++ b/api/server/services/Files/Audio/streamAudio.js @@ -1,5 +1,5 @@ const WebSocket = require('ws'); -const { CacheKeys } = require('librechat-data-provider'); +const { CacheKeys, findLastSeparatorIndex, SEPARATORS } = require('librechat-data-provider'); const { getLogStores } = require('~/cache'); /** @@ -71,25 +71,6 @@ function assembleQuery(parameters) { return query; } -const SEPARATORS = ['.', '?', '!', '۔', '。', '‥', ';', '¡', '¿', '\n']; - -/** - * - * @param {string} text - * @param {string[] | undefined} [separators] - * @returns - */ -function findLastSeparatorIndex(text, separators = SEPARATORS) { - let lastIndex = -1; - for (const separator of separators) { - const index = text.lastIndexOf(separator); - if (index > lastIndex) { - lastIndex = index; - } - } - return lastIndex; -} - const MAX_NOT_FOUND_COUNT = 6; const MAX_NO_CHANGE_COUNT = 10; diff --git a/api/server/services/Threads/manage.js b/api/server/services/Threads/manage.js index 9e7eee70b6b..920e68af554 100644 --- a/api/server/services/Threads/manage.js +++ b/api/server/services/Threads/manage.js @@ -11,7 +11,6 @@ const { recordMessage, getMessages } = require('~/models/Message'); const { saveConvo } = require('~/models/Conversation'); const spendTokens = require('~/models/spendTokens'); const { countTokens } = require('~/server/utils'); -const { logger } = require('~/config'); /** * Initializes a new thread or adds messages to an existing thread. @@ -516,80 +515,34 @@ const recordUsage = async ({ ); }; -/** - * Creates a replaceAnnotation function with internal state for tracking the index offset. - * - * @returns {function} The replaceAnnotation function with closure for index offset. - */ -function createReplaceAnnotation() { - let indexOffset = 0; - - /** - * Safely replaces the annotated text within the specified range denoted by start_index and end_index, - * after verifying that the text within that range matches the given annotation text. - * Proceeds with the replacement even if a mismatch is found, but logs a warning. - * - * @param {object} params The original text content. - * @param {string} params.currentText The current text content, with/without replacements. - * @param {number} params.start_index The starting index where replacement should begin. - * @param {number} params.end_index The ending index where replacement should end. - * @param {string} params.expectedText The text expected to be found in the specified range. - * @param {string} params.replacementText The text to insert in place of the existing content. - * @returns {string} The text with the replacement applied, regardless of text match. - */ - function replaceAnnotation({ - currentText, - start_index, - end_index, - expectedText, - replacementText, - }) { - const adjustedStartIndex = start_index + indexOffset; - const adjustedEndIndex = end_index + indexOffset; - - if ( - adjustedStartIndex < 0 || - adjustedEndIndex > currentText.length || - adjustedStartIndex > adjustedEndIndex - ) { - logger.warn(`Invalid range specified for annotation replacement. - Attempting replacement with \`replace\` method instead... 
- length: ${currentText.length} - start_index: ${adjustedStartIndex} - end_index: ${adjustedEndIndex}`); - return currentText.replace(expectedText, replacementText); - } - - if (currentText.substring(adjustedStartIndex, adjustedEndIndex) !== expectedText) { - return currentText.replace(expectedText, replacementText); - } +const uniqueCitationStart = '^====||==='; +const uniqueCitationEnd = '==|||||^'; - indexOffset += replacementText.length - (adjustedEndIndex - adjustedStartIndex); - return ( - currentText.slice(0, adjustedStartIndex) + - replacementText + - currentText.slice(adjustedEndIndex) - ); - } - - return replaceAnnotation; +/** Helper function to escape special characters in regex + * @param {string} string - The string to escape. + * @returns {string} The escaped string. + */ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); } /** * Sorts, processes, and flattens messages to a single string. * - * @param {object} params - The OpenAI client instance. + * @param {object} params - The parameters for processing messages. * @param {OpenAIClient} params.openai - The OpenAI client instance. * @param {RunClient} params.client - The LibreChat client that manages the run: either refers to `OpenAI` or `StreamRunManager`. * @param {ThreadMessage[]} params.messages - An array of messages. - * @returns {Promise<{messages: ThreadMessage[], text: string}>} The sorted messages and the flattened text. + * @returns {Promise<{messages: ThreadMessage[], text: string, edited: boolean}>} The sorted messages, the flattened text, and whether it was edited. */ async function processMessages({ openai, client, messages = [] }) { const sorted = messages.sort((a, b) => a.created_at - b.created_at); let text = ''; let edited = false; - const sources = []; + const sources = new Map(); + const fileRetrievalPromises = []; + for (const message of sorted) { message.files = []; for (const content of message.content) { @@ -598,15 +551,21 @@ async function processMessages({ openai, client, messages = [] }) { const currentFileId = contentType?.file_id; if (type === ContentTypes.IMAGE_FILE && !client.processedFileIds.has(currentFileId)) { - const file = await retrieveAndProcessFile({ - openai, - client, - file_id: currentFileId, - basename: `${currentFileId}.png`, - }); - - client.processedFileIds.add(currentFileId); - message.files.push(file); + fileRetrievalPromises.push( + retrieveAndProcessFile({ + openai, + client, + file_id: currentFileId, + basename: `${currentFileId}.png`, + }) + .then((file) => { + client.processedFileIds.add(currentFileId); + message.files.push(file); + }) + .catch((error) => { + console.error(`Failed to retrieve file: ${error.message}`); + }), + ); continue; } @@ -615,78 +574,110 @@ async function processMessages({ openai, client, messages = [] }) { /** @type {{ annotations: Annotation[] }} */ const { annotations } = contentType ?? 
{}; - // Process annotations if they exist if (!annotations?.length) { - text += currentText + ' '; + text += currentText; continue; } - const originalText = currentText; - text += originalText; - - const replaceAnnotation = createReplaceAnnotation(); - - logger.debug('[processMessages] Processing annotations:', annotations); - for (const annotation of annotations) { - let file; + const replacements = []; + const annotationPromises = annotations.map(async (annotation) => { const type = annotation.type; const annotationType = annotation[type]; const file_id = annotationType?.file_id; const alreadyProcessed = client.processedFileIds.has(file_id); - const replaceCurrentAnnotation = (replacementText = '') => { - const { start_index, end_index, text: expectedText } = annotation; - currentText = replaceAnnotation({ - originalText, - currentText, - start_index, - end_index, - expectedText, - replacementText, - }); - edited = true; - }; - - if (alreadyProcessed) { - const { file_id } = annotationType || {}; - file = await retrieveAndProcessFile({ openai, client, file_id, unknownType: true }); - } else if (type === AnnotationTypes.FILE_PATH) { - const basename = path.basename(annotation.text); - file = await retrieveAndProcessFile({ - openai, - client, - file_id, - basename, - }); - replaceCurrentAnnotation(file.filepath); - } else if (type === AnnotationTypes.FILE_CITATION) { - file = await retrieveAndProcessFile({ - openai, - client, - file_id, - unknownType: true, - }); - sources.push(file.filename); - replaceCurrentAnnotation(`^${sources.length}^`); + let file; + let replacementText = ''; + + try { + if (alreadyProcessed) { + file = await retrieveAndProcessFile({ openai, client, file_id, unknownType: true }); + } else if (type === AnnotationTypes.FILE_PATH) { + const basename = path.basename(annotation.text); + file = await retrieveAndProcessFile({ + openai, + client, + file_id, + basename, + }); + replacementText = file.filepath; + } else if (type === AnnotationTypes.FILE_CITATION && file_id) { + file = await retrieveAndProcessFile({ + openai, + client, + file_id, + unknownType: true, + }); + if (file && file.filename) { + if (!sources.has(file.filename)) { + sources.set(file.filename, sources.size + 1); + } + replacementText = `${uniqueCitationStart}${sources.get( + file.filename, + )}${uniqueCitationEnd}`; + } + } + + if (file && replacementText) { + replacements.push({ + start: annotation.start_index, + end: annotation.end_index, + text: replacementText, + }); + edited = true; + if (!alreadyProcessed) { + client.processedFileIds.add(file_id); + message.files.push(file); + } + } + } catch (error) { + console.error(`Failed to process annotation: ${error.message}`); } + }); - text = currentText; + await Promise.all(annotationPromises); - if (!file) { - continue; - } - - client.processedFileIds.add(file_id); - message.files.push(file); + // Apply replacements in reverse order + replacements.sort((a, b) => b.start - a.start); + for (const { start, end, text: replacementText } of replacements) { + currentText = currentText.slice(0, start) + replacementText + currentText.slice(end); } + + text += currentText; } } - if (sources.length) { + await Promise.all(fileRetrievalPromises); + + // Handle adjacent identical citations with the unique format + const adjacentCitationRegex = new RegExp( + `${escapeRegExp(uniqueCitationStart)}(\\d+)${escapeRegExp( + uniqueCitationEnd, + )}(\\s*)${escapeRegExp(uniqueCitationStart)}(\\d+)${escapeRegExp(uniqueCitationEnd)}`, + 'g', + ); + text = 
text.replace(adjacentCitationRegex, (match, num1, space, num2) => { + return num1 === num2 + ? `${uniqueCitationStart}${num1}${uniqueCitationEnd}` + : `${uniqueCitationStart}${num1}${uniqueCitationEnd}${space}${uniqueCitationStart}${num2}${uniqueCitationEnd}`; + }); + + // Remove any remaining adjacent identical citations + const remainingAdjacentRegex = new RegExp( + `(${escapeRegExp(uniqueCitationStart)}(\\d+)${escapeRegExp(uniqueCitationEnd)})\\s*\\1+`, + 'g', + ); + text = text.replace(remainingAdjacentRegex, '$1'); + + // Replace the unique citation format with the final format + text = text.replace(new RegExp(escapeRegExp(uniqueCitationStart), 'g'), '^'); + text = text.replace(new RegExp(escapeRegExp(uniqueCitationEnd), 'g'), '^'); + + if (sources.size) { text += '\n\n'; - for (let i = 0; i < sources.length; i++) { - text += `^${i + 1}.^ ${sources[i]}${i === sources.length - 1 ? '' : '\n'}`; - } + Array.from(sources.entries()).forEach(([source, index], arrayIndex) => { + text += `^${index}.^ ${source}${arrayIndex === sources.size - 1 ? '' : '\n'}`; + }); } return { messages: sorted, text, edited }; diff --git a/api/server/services/Threads/processMessages.spec.js b/api/server/services/Threads/processMessages.spec.js new file mode 100644 index 00000000000..673b96bf7cd --- /dev/null +++ b/api/server/services/Threads/processMessages.spec.js @@ -0,0 +1,983 @@ +const { retrieveAndProcessFile } = require('~/server/services/Files/process'); +const { processMessages } = require('./manage'); + +jest.mock('~/server/services/Files/process', () => ({ + retrieveAndProcessFile: jest.fn(), +})); + +describe('processMessages', () => { + let openai, client; + + beforeEach(() => { + openai = {}; + client = { + processedFileIds: new Set(), + }; + jest.clearAllMocks(); + retrieveAndProcessFile.mockReset(); + }); + + test('handles normal case with single source', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'This is a test ^1^ and another^1^', + annotations: [ + { + type: 'file_citation', + start_index: 15, + end_index: 18, + file_citation: { file_id: 'file1' }, + }, + { + type: 'file_citation', + start_index: 30, + end_index: 33, + file_citation: { file_id: 'file1' }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockResolvedValue({ filename: 'test.txt' }); + + const result = await processMessages({ openai, client, messages }); + + expect(result.text).toBe('This is a test ^1^ and another^1^\n\n^1.^ test.txt'); + expect(result.edited).toBe(true); + }); + + test('handles multiple different sources', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'This is a test ^1^ and another^2^', + annotations: [ + { + type: 'file_citation', + start_index: 15, + end_index: 18, + file_citation: { file_id: 'file1' }, + }, + { + type: 'file_citation', + start_index: 30, + end_index: 33, + file_citation: { file_id: 'file2' }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile + .mockResolvedValueOnce({ filename: 'test1.txt' }) + .mockResolvedValueOnce({ filename: 'test2.txt' }); + + const result = await processMessages({ openai, client, messages }); + + expect(result.text).toBe('This is a test ^1^ and another^2^\n\n^1.^ test1.txt\n^2.^ test2.txt'); + expect(result.edited).toBe(true); + }); + + test('handles file retrieval failure', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'This is a test ^1^', + annotations: [ + { + type: 
'file_citation', + start_index: 15, + end_index: 18, + file_citation: { file_id: 'file1' }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockRejectedValue(new Error('File not found')); + + const result = await processMessages({ openai, client, messages }); + + expect(result.text).toBe('This is a test ^1^'); + expect(result.edited).toBe(false); + }); + + test('handles citations without file ids', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'This is a test ^1^', + annotations: [{ type: 'file_citation', start_index: 15, end_index: 18 }], + }, + }, + ], + created_at: 1, + }, + ]; + + const result = await processMessages({ openai, client, messages }); + + expect(result.text).toBe('This is a test ^1^'); + expect(result.edited).toBe(false); + }); + + test('handles mixed valid and invalid citations', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'This is a test ^1^ and ^2^ and ^3^', + annotations: [ + { + type: 'file_citation', + start_index: 15, + end_index: 18, + file_citation: { file_id: 'file1' }, + }, + { type: 'file_citation', start_index: 23, end_index: 26 }, + { + type: 'file_citation', + start_index: 31, + end_index: 34, + file_citation: { file_id: 'file3' }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile + .mockResolvedValueOnce({ filename: 'test1.txt' }) + .mockResolvedValueOnce({ filename: 'test3.txt' }); + + const result = await processMessages({ openai, client, messages }); + + expect(result.text).toBe( + 'This is a test ^1^ and ^2^ and ^2^\n\n^1.^ test1.txt\n^2.^ test3.txt', + ); + expect(result.edited).toBe(true); + }); + + test('handles adjacent identical citations', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'This is a test ^1^^1^ and ^1^ ^1^', + annotations: [ + { + type: 'file_citation', + start_index: 15, + end_index: 18, + file_citation: { file_id: 'file1' }, + }, + { + type: 'file_citation', + start_index: 18, + end_index: 21, + file_citation: { file_id: 'file1' }, + }, + { + type: 'file_citation', + start_index: 26, + end_index: 29, + file_citation: { file_id: 'file1' }, + }, + { + type: 'file_citation', + start_index: 30, + end_index: 33, + file_citation: { file_id: 'file1' }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockResolvedValue({ filename: 'test.txt' }); + + const result = await processMessages({ openai, client, messages }); + + expect(result.text).toBe('This is a test ^1^ and ^1^\n\n^1.^ test.txt'); + expect(result.edited).toBe(true); + }); + test('handles real data with multiple adjacent citations', async () => { + const messages = [ + { + id: 'msg_XXXXXXXXXXXXXXXXXXXX', + object: 'thread.message', + created_at: 1722980324, + assistant_id: 'asst_XXXXXXXXXXXXXXXXXXXX', + thread_id: 'thread_XXXXXXXXXXXXXXXXXXXX', + run_id: 'run_XXXXXXXXXXXXXXXXXXXX', + status: 'completed', + incomplete_details: null, + incomplete_at: null, + completed_at: 1722980331, + role: 'assistant', + content: [ + { + type: 'text', + text: { + value: + 'The text you have uploaded is from the book "Harry Potter and the Philosopher\'s Stone" by J.K. Rowling. It follows the story of a young boy named Harry Potter who discovers that he is a wizard on his eleventh birthday. Here are some key points of the narrative:\n\n1. 
**Discovery and Invitation to Hogwarts**: Harry learns that he is a wizard and receives an invitation to attend Hogwarts School of Witchcraft and Wizardry【11:2†source】【11:4†source】.\n\n2. **Shopping for Supplies**: Hagrid takes Harry to Diagon Alley to buy his school supplies, including his wand from Ollivander\'s【11:9†source】【11:14†source】.\n\n3. **Introduction to Hogwarts**: Harry is introduced to Hogwarts, the magical school where he will learn about magic and discover more about his own background【11:12†source】【11:18†source】.\n\n4. **Meeting Friends and Enemies**: At Hogwarts, Harry makes friends like Ron Weasley and Hermione Granger, and enemies like Draco Malfoy【11:16†source】.\n\n5. **Uncovering the Mystery**: Harry, along with Ron and Hermione, uncovers the mystery of the Philosopher\'s Stone and its connection to the dark wizard Voldemort【11:1†source】【11:10†source】【11:7†source】.\n\nThese points highlight Harry\'s initial experiences in the magical world and set the stage for his adventures at Hogwarts.', + annotations: [ + { + type: 'file_citation', + text: '【11:2†source】', + start_index: 420, + end_index: 433, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:4†source】', + start_index: 433, + end_index: 446, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:9†source】', + start_index: 578, + end_index: 591, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:14†source】', + start_index: 591, + end_index: 605, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:12†source】', + start_index: 767, + end_index: 781, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:18†source】', + start_index: 781, + end_index: 795, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:16†source】', + start_index: 935, + end_index: 949, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:1†source】', + start_index: 1114, + end_index: 1127, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:10†source】', + start_index: 1127, + end_index: 1141, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:7†source】', + start_index: 1141, + end_index: 1154, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + ], + }, + }, + ], + attachments: [], + metadata: {}, + files: [ + { + object: 'file', + id: 'file-XXXXXXXXXXXXXXXXXXXX', + purpose: 'assistants', + filename: 'hp1.txt', + bytes: 439742, + created_at: 1722962139, + status: 'processed', + status_details: null, + type: 'text/plain', + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-XXXXXXXXXXXXXXXXXXXX/hp1.txt', + usage: 1, + user: 'XXXXXXXXXXXXXXXXXXXX', + context: 'assistants', + source: 'openai', + model: 'gpt-4o', + }, + ], + }, + ]; + + retrieveAndProcessFile.mockResolvedValue({ filename: 'hp1.txt' }); + + const result = await processMessages({ + openai: {}, + client: { processedFileIds: new Set() }, + messages, + }); + + const expectedText = `The text you have uploaded is from the book "Harry Potter and the Philosopher's Stone" by J.K. Rowling. 
It follows the story of a young boy named Harry Potter who discovers that he is a wizard on his eleventh birthday. Here are some key points of the narrative: + +1. **Discovery and Invitation to Hogwarts**: Harry learns that he is a wizard and receives an invitation to attend Hogwarts School of Witchcraft and Wizardry^1^. + +2. **Shopping for Supplies**: Hagrid takes Harry to Diagon Alley to buy his school supplies, including his wand from Ollivander's^1^. + +3. **Introduction to Hogwarts**: Harry is introduced to Hogwarts, the magical school where he will learn about magic and discover more about his own background^1^. + +4. **Meeting Friends and Enemies**: At Hogwarts, Harry makes friends like Ron Weasley and Hermione Granger, and enemies like Draco Malfoy^1^. + +5. **Uncovering the Mystery**: Harry, along with Ron and Hermione, uncovers the mystery of the Philosopher's Stone and its connection to the dark wizard Voldemort^1^. + +These points highlight Harry's initial experiences in the magical world and set the stage for his adventures at Hogwarts. + +^1.^ hp1.txt`; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(true); + }); + + test('handles real data with multiple adjacent citations with multiple sources', async () => { + const messages = [ + { + id: 'msg_XXXXXXXXXXXXXXXXXXXX', + object: 'thread.message', + created_at: 1722980324, + assistant_id: 'asst_XXXXXXXXXXXXXXXXXXXX', + thread_id: 'thread_XXXXXXXXXXXXXXXXXXXX', + run_id: 'run_XXXXXXXXXXXXXXXXXXXX', + status: 'completed', + incomplete_details: null, + incomplete_at: null, + completed_at: 1722980331, + role: 'assistant', + content: [ + { + type: 'text', + text: { + value: + 'The text you have uploaded is from the book "Harry Potter and the Philosopher\'s Stone" by J.K. Rowling. It follows the story of a young boy named Harry Potter who discovers that he is a wizard on his eleventh birthday. Here are some key points of the narrative:\n\n1. **Discovery and Invitation to Hogwarts**: Harry learns that he is a wizard and receives an invitation to attend Hogwarts School of Witchcraft and Wizardry【11:2†source】【11:4†source】.\n\n2. **Shopping for Supplies**: Hagrid takes Harry to Diagon Alley to buy his school supplies, including his wand from Ollivander\'s【11:9†source】【11:14†source】.\n\n3. **Introduction to Hogwarts**: Harry is introduced to Hogwarts, the magical school where he will learn about magic and discover more about his own background【11:12†source】【11:18†source】.\n\n4. **Meeting Friends and Enemies**: At Hogwarts, Harry makes friends like Ron Weasley and Hermione Granger, and enemies like Draco Malfoy【11:16†source】.\n\n5. 
**Uncovering the Mystery**: Harry, along with Ron and Hermione, uncovers the mystery of the Philosopher\'s Stone and its connection to the dark wizard Voldemort【11:1†source】【11:10†source】【11:7†source】.\n\nThese points highlight Harry\'s initial experiences in the magical world and set the stage for his adventures at Hogwarts.', + annotations: [ + { + type: 'file_citation', + text: '【11:2†source】', + start_index: 420, + end_index: 433, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:4†source】', + start_index: 433, + end_index: 446, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:9†source】', + start_index: 578, + end_index: 591, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:14†source】', + start_index: 591, + end_index: 605, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:12†source】', + start_index: 767, + end_index: 781, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:18†source】', + start_index: 781, + end_index: 795, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:16†source】', + start_index: 935, + end_index: 949, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:1†source】', + start_index: 1114, + end_index: 1127, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:10†source】', + start_index: 1127, + end_index: 1141, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_citation', + text: '【11:7†source】', + start_index: 1141, + end_index: 1154, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + ], + }, + }, + ], + attachments: [], + metadata: {}, + files: [ + { + object: 'file', + id: 'file-XXXXXXXXXXXXXXXXXXXX', + purpose: 'assistants', + filename: 'hp1.txt', + bytes: 439742, + created_at: 1722962139, + status: 'processed', + status_details: null, + type: 'text/plain', + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-XXXXXXXXXXXXXXXXXXXX/hp1.txt', + usage: 1, + user: 'XXXXXXXXXXXXXXXXXXXX', + context: 'assistants', + source: 'openai', + model: 'gpt-4o', + }, + ], + }, + ]; + + retrieveAndProcessFile.mockResolvedValue({ filename: 'hp1.txt' }); + + const result = await processMessages({ + openai: {}, + client: { processedFileIds: new Set() }, + messages, + }); + + const expectedText = `The text you have uploaded is from the book "Harry Potter and the Philosopher's Stone" by J.K. Rowling. It follows the story of a young boy named Harry Potter who discovers that he is a wizard on his eleventh birthday. Here are some key points of the narrative: + +1. **Discovery and Invitation to Hogwarts**: Harry learns that he is a wizard and receives an invitation to attend Hogwarts School of Witchcraft and Wizardry^1^. + +2. **Shopping for Supplies**: Hagrid takes Harry to Diagon Alley to buy his school supplies, including his wand from Ollivander's^1^. + +3. **Introduction to Hogwarts**: Harry is introduced to Hogwarts, the magical school where he will learn about magic and discover more about his own background^1^. + +4. 
**Meeting Friends and Enemies**: At Hogwarts, Harry makes friends like Ron Weasley and Hermione Granger, and enemies like Draco Malfoy^1^. + +5. **Uncovering the Mystery**: Harry, along with Ron and Hermione, uncovers the mystery of the Philosopher's Stone and its connection to the dark wizard Voldemort^1^. + +These points highlight Harry's initial experiences in the magical world and set the stage for his adventures at Hogwarts. + +^1.^ hp1.txt`; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(true); + }); + + test('handles edge case with pre-existing citation-like text', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: + 'This is a test ^1^ with pre-existing citation-like text. Here\'s a real citation【11:2†source】.', + annotations: [ + { + type: 'file_citation', + text: '【11:2†source】', + start_index: 79, + end_index: 92, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockResolvedValue({ filename: 'test.txt' }); + + const result = await processMessages({ + openai: {}, + client: { processedFileIds: new Set() }, + messages, + }); + + const expectedText = + 'This is a test ^1^ with pre-existing citation-like text. Here\'s a real citation^1^.\n\n^1.^ test.txt'; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(true); + }); + + test('handles FILE_PATH annotation type', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'Here is a file path: [file_path]', + annotations: [ + { + type: 'file_path', + text: '[file_path]', + start_index: 21, + end_index: 32, + file_path: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockResolvedValue({ + filename: 'test.txt', + filepath: '/path/to/test.txt', + }); + + const result = await processMessages({ + openai: {}, + client: { processedFileIds: new Set() }, + messages, + }); + + const expectedText = 'Here is a file path: /path/to/test.txt'; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(true); + }); + + test('handles FILE_CITATION annotation type', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'Here is a citation: [citation]', + annotations: [ + { + type: 'file_citation', + text: '[citation]', + start_index: 20, + end_index: 30, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockResolvedValue({ filename: 'test.txt' }); + + const result = await processMessages({ + openai: {}, + client: { processedFileIds: new Set() }, + messages, + }); + + const expectedText = 'Here is a citation: ^1^\n\n^1.^ test.txt'; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(true); + }); + + test('handles multiple annotation types in a single message', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: + 'File path: [file_path]. Citation: [citation1]. 
Another citation: [citation2].', + annotations: [ + { + type: 'file_path', + text: '[file_path]', + start_index: 11, + end_index: 22, + file_path: { + file_id: 'file-XXXXXXXXXXXXXXXX1', + }, + }, + { + type: 'file_citation', + text: '[citation1]', + start_index: 34, + end_index: 45, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXX2', + }, + }, + { + type: 'file_citation', + text: '[citation2]', + start_index: 65, + end_index: 76, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXX3', + }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockResolvedValueOnce({ + filename: 'file1.txt', + filepath: '/path/to/file1.txt', + }); + retrieveAndProcessFile.mockResolvedValueOnce({ filename: 'file2.txt' }); + retrieveAndProcessFile.mockResolvedValueOnce({ filename: 'file3.txt' }); + + const result = await processMessages({ + openai: {}, + client: { processedFileIds: new Set() }, + messages, + }); + + const expectedText = + 'File path: /path/to/file1.txt. Citation: ^1^. Another citation: ^2^.\n\n^1.^ file2.txt\n^2.^ file3.txt'; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(true); + }); + + test('handles annotation processing failure', async () => { + const messages = [ + { + content: [ + { + type: 'text', + text: { + value: 'This citation will fail: [citation]', + annotations: [ + { + type: 'file_citation', + text: '[citation]', + start_index: 25, + end_index: 35, + file_citation: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + ], + }, + }, + ], + created_at: 1, + }, + ]; + + retrieveAndProcessFile.mockRejectedValue(new Error('File not found')); + + const result = await processMessages({ + openai: {}, + client: { processedFileIds: new Set() }, + messages, + }); + + const expectedText = 'This citation will fail: [citation]'; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(false); + }); + + test('handles multiple FILE_PATH annotations with sandbox links', async () => { + const messages = [ + { + id: 'msg_XXXXXXXXXXXXXXXXXXXX', + object: 'thread.message', + created_at: 1722983745, + assistant_id: 'asst_XXXXXXXXXXXXXXXXXXXX', + thread_id: 'thread_XXXXXXXXXXXXXXXXXXXX', + run_id: 'run_XXXXXXXXXXXXXXXXXXXX', + status: 'completed', + incomplete_details: null, + incomplete_at: null, + completed_at: 1722983747, + role: 'assistant', + content: [ + { + type: 'text', + text: { + value: + 'I have generated three dummy CSV files for you. You can download them using the links below:\n\n1. [Download Dummy Data 1](sandbox:/mnt/data/dummy_data1.csv)\n2. [Download Dummy Data 2](sandbox:/mnt/data/dummy_data2.csv)\n3. 
[Download Dummy Data 3](sandbox:/mnt/data/dummy_data3.csv)', + annotations: [ + { + type: 'file_path', + text: 'sandbox:/mnt/data/dummy_data1.csv', + start_index: 121, + end_index: 154, + file_path: { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + }, + }, + { + type: 'file_path', + text: 'sandbox:/mnt/data/dummy_data2.csv', + start_index: 183, + end_index: 216, + file_path: { + file_id: 'file-YYYYYYYYYYYYYYYYYYYY', + }, + }, + { + type: 'file_path', + text: 'sandbox:/mnt/data/dummy_data3.csv', + start_index: 245, + end_index: 278, + file_path: { + file_id: 'file-ZZZZZZZZZZZZZZZZZZZZ', + }, + }, + ], + }, + }, + ], + attachments: [ + { + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + tools: [ + { + type: 'code_interpreter', + }, + ], + }, + { + file_id: 'file-YYYYYYYYYYYYYYYYYYYY', + tools: [ + { + type: 'code_interpreter', + }, + ], + }, + { + file_id: 'file-ZZZZZZZZZZZZZZZZZZZZ', + tools: [ + { + type: 'code_interpreter', + }, + ], + }, + ], + metadata: {}, + files: [ + { + object: 'file', + id: 'file-XXXXXXXXXXXXXXXXXXXX', + purpose: 'assistants_output', + filename: 'dummy_data1.csv', + bytes: 1925, + created_at: 1722983746, + status: 'processed', + status_details: null, + type: 'text/csv', + file_id: 'file-XXXXXXXXXXXXXXXXXXXX', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-XXXXXXXXXXXXXXXXXXXX/dummy_data1.csv', + usage: 1, + user: 'XXXXXXXXXXXXXXXXXXXX', + context: 'assistants_output', + source: 'openai', + model: 'gpt-4o-mini', + }, + { + object: 'file', + id: 'file-YYYYYYYYYYYYYYYYYYYY', + purpose: 'assistants_output', + filename: 'dummy_data2.csv', + bytes: 4221, + created_at: 1722983746, + status: 'processed', + status_details: null, + type: 'text/csv', + file_id: 'file-YYYYYYYYYYYYYYYYYYYY', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-YYYYYYYYYYYYYYYYYYYY/dummy_data2.csv', + usage: 1, + user: 'XXXXXXXXXXXXXXXXXXXX', + context: 'assistants_output', + source: 'openai', + model: 'gpt-4o-mini', + }, + { + object: 'file', + id: 'file-ZZZZZZZZZZZZZZZZZZZZ', + purpose: 'assistants_output', + filename: 'dummy_data3.csv', + bytes: 3534, + created_at: 1722983747, + status: 'processed', + status_details: null, + type: 'text/csv', + file_id: 'file-ZZZZZZZZZZZZZZZZZZZZ', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-ZZZZZZZZZZZZZZZZZZZZ/dummy_data3.csv', + usage: 1, + user: 'XXXXXXXXXXXXXXXXXXXX', + context: 'assistants_output', + source: 'openai', + model: 'gpt-4o-mini', + }, + ], + }, + ]; + + const mockClient = { + processedFileIds: new Set(), + }; + + // Mock the retrieveAndProcessFile function for each file + retrieveAndProcessFile.mockImplementation(({ file_id }) => { + const fileMap = { + 'file-XXXXXXXXXXXXXXXXXXXX': { + filename: 'dummy_data1.csv', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-XXXXXXXXXXXXXXXXXXXX/dummy_data1.csv', + }, + 'file-YYYYYYYYYYYYYYYYYYYY': { + filename: 'dummy_data2.csv', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-YYYYYYYYYYYYYYYYYYYY/dummy_data2.csv', + }, + 'file-ZZZZZZZZZZZZZZZZZZZZ': { + filename: 'dummy_data3.csv', + filepath: + 'https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-ZZZZZZZZZZZZZZZZZZZZ/dummy_data3.csv', + }, + }; + + return Promise.resolve(fileMap[file_id]); + }); + + const result = await processMessages({ openai: {}, client: mockClient, messages }); + + const expectedText = + 'I have generated three dummy CSV files for you. You can download them using the links below:\n\n1. 
[Download Dummy Data 1](https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-XXXXXXXXXXXXXXXXXXXX/dummy_data1.csv)\n2. [Download Dummy Data 2](https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-YYYYYYYYYYYYYYYYYYYY/dummy_data2.csv)\n3. [Download Dummy Data 3](https://api.openai.com/v1/files/XXXXXXXXXXXXXXXXXXXX/file-ZZZZZZZZZZZZZZZZZZZZ/dummy_data3.csv)'; + + expect(result.text).toBe(expectedText); + expect(result.edited).toBe(true); + }); +}); diff --git a/api/server/utils/crypto.js b/api/server/utils/crypto.js index 9fe1f898fba..c143506cc54 100644 --- a/api/server/utils/crypto.js +++ b/api/server/utils/crypto.js @@ -3,7 +3,7 @@ require('dotenv').config(); const { webcrypto } = require('node:crypto'); const key = Buffer.from(process.env.CREDS_KEY, 'hex'); const iv = Buffer.from(process.env.CREDS_IV, 'hex'); -const algorithm = 'aes-256-cbc'; +const algorithm = 'AES-CBC'; async function encrypt(value) { const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [ diff --git a/api/utils/tokens.js b/api/utils/tokens.js index dec669be2c0..122c3c3f0a4 100644 --- a/api/utils/tokens.js +++ b/api/utils/tokens.js @@ -11,6 +11,7 @@ const openAIModels = { 'gpt-4-0125': 127990, // -10 from max 'gpt-4o': 127990, // -10 from max 'gpt-4o-mini': 127990, // -10 from max + 'gpt-4o-2024-08-06': 127990, // -10 from max 'gpt-4-turbo': 127990, // -10 from max 'gpt-4-vision': 127990, // -10 from max 'gpt-3.5-turbo': 16375, // -10 from max diff --git a/client/package.json b/client/package.json index 35cf53795f0..2693ada9f78 100644 --- a/client/package.json +++ b/client/package.json @@ -1,6 +1,6 @@ { "name": "@librechat/frontend", - "version": "0.7.4-rc1", + "version": "0.7.4", "description": "", "type": "module", "scripts": { @@ -68,6 +68,7 @@ "lodash": "^4.17.21", "lucide-react": "^0.394.0", "match-sorter": "^6.3.4", + "msedge-tts": "^1.3.4", "rc-input-number": "^7.4.2", "react": "^18.2.0", "react-avatar-editor": "^13.0.2", diff --git a/client/src/App.jsx b/client/src/App.jsx index ce2ec3b6dec..e2b11b261f9 100644 --- a/client/src/App.jsx +++ b/client/src/App.jsx @@ -8,6 +8,7 @@ import { QueryClient, QueryClientProvider, QueryCache } from '@tanstack/react-qu import { ScreenshotProvider, ThemeProvider, useApiErrorBoundary } from './hooks'; import { ToastProvider } from './Providers'; import Toast from './components/ui/Toast'; +import { LiveAnnouncer } from '~/a11y'; import { router } from './routes'; const App = () => { @@ -26,18 +27,20 @@ const App = () => { return ( - - - - - - - - - - - - + + + + + + + + + + + + + + ); diff --git a/client/src/Providers/AnnouncerContext.tsx b/client/src/Providers/AnnouncerContext.tsx new file mode 100644 index 00000000000..8171492d34c --- /dev/null +++ b/client/src/Providers/AnnouncerContext.tsx @@ -0,0 +1,28 @@ +// AnnouncerContext.tsx +import React from 'react'; + +export interface AnnounceOptions { + message: string; + id?: string; + isStream?: boolean; + isComplete?: boolean; +} + +interface AnnouncerContextType { + announceAssertive: (options: AnnounceOptions) => void; + announcePolite: (options: AnnounceOptions) => void; +} + +const defaultContext: AnnouncerContextType = { + announceAssertive: () => console.warn('Announcement failed, LiveAnnouncer context is missing'), + announcePolite: () => console.warn('Announcement failed, LiveAnnouncer context is missing'), +}; + +const AnnouncerContext = React.createContext(defaultContext); + +export const useLiveAnnouncer = () => { + const context = React.useContext(AnnouncerContext); + 
return context; +}; + +export default AnnouncerContext; diff --git a/client/src/Providers/index.ts b/client/src/Providers/index.ts index 16487ded25c..3ac7e1ba05b 100644 --- a/client/src/Providers/index.ts +++ b/client/src/Providers/index.ts @@ -11,3 +11,4 @@ export * from './BookmarkContext'; export * from './DashboardContext'; export * from './AssistantsContext'; export * from './AssistantsMapContext'; +export * from './AnnouncerContext'; diff --git a/client/src/a11y/Announcer.tsx b/client/src/a11y/Announcer.tsx new file mode 100644 index 00000000000..1b79bf19182 --- /dev/null +++ b/client/src/a11y/Announcer.tsx @@ -0,0 +1,54 @@ +import React, { useState, useEffect } from 'react'; +import MessageBlock from './MessageBlock'; + +interface AnnouncerProps { + politeMessage: string; + politeMessageId: string; + assertiveMessage: string; + assertiveMessageId: string; +} + +const Announcer: React.FC = ({ + politeMessage, + politeMessageId, + assertiveMessage, + assertiveMessageId, +}) => { + const [state, setState] = useState({ + assertiveMessage1: '', + assertiveMessage2: '', + politeMessage1: '', + politeMessage2: '', + setAlternatePolite: false, + setAlternateAssertive: false, + }); + + useEffect(() => { + setState((prevState) => ({ + ...prevState, + politeMessage1: prevState.setAlternatePolite ? '' : politeMessage, + politeMessage2: prevState.setAlternatePolite ? politeMessage : '', + setAlternatePolite: !prevState.setAlternatePolite, + })); + }, [politeMessage, politeMessageId]); + + useEffect(() => { + setState((prevState) => ({ + ...prevState, + assertiveMessage1: prevState.setAlternateAssertive ? '' : assertiveMessage, + assertiveMessage2: prevState.setAlternateAssertive ? assertiveMessage : '', + setAlternateAssertive: !prevState.setAlternateAssertive, + })); + }, [assertiveMessage, assertiveMessageId]); + + return ( +
<div> + <MessageBlock aria-live="assertive" message={state.assertiveMessage1} /> + <MessageBlock aria-live="assertive" message={state.assertiveMessage2} /> + <MessageBlock aria-live="polite" message={state.politeMessage1} /> + <MessageBlock aria-live="polite" message={state.politeMessage2} /> + </div>
+ ); +}; + +export default Announcer; diff --git a/client/src/a11y/LiveAnnouncer.tsx b/client/src/a11y/LiveAnnouncer.tsx new file mode 100644 index 00000000000..7d56ebc2f55 --- /dev/null +++ b/client/src/a11y/LiveAnnouncer.tsx @@ -0,0 +1,109 @@ +import React, { useState, useCallback, useRef, useEffect } from 'react'; +import { findLastSeparatorIndex } from 'librechat-data-provider'; +import type { AnnounceOptions } from '~/Providers/AnnouncerContext'; +import AnnouncerContext from '~/Providers/AnnouncerContext'; +import Announcer from './Announcer'; + +interface LiveAnnouncerProps { + children: React.ReactNode; +} + +const LiveAnnouncer: React.FC<LiveAnnouncerProps> = ({ children }) => { + const [announcePoliteMessage, setAnnouncePoliteMessage] = useState(''); + const [politeMessageId, setPoliteMessageId] = useState(''); + const [announceAssertiveMessage, setAnnounceAssertiveMessage] = useState(''); + const [assertiveMessageId, setAssertiveMessageId] = useState(''); + + const politeProcessedTextRef = useRef(''); + const politeQueueRef = useRef<Array<{ message: string; id: string }>>([]); + const isAnnouncingRef = useRef(false); + const counterRef = useRef(0); + + const generateUniqueId = (prefix: string) => { + counterRef.current += 1; + return `${prefix}-${counterRef.current}`; + }; + + const processChunks = (text: string, processedTextRef: React.MutableRefObject<string>) => { + const remainingText = text.slice(processedTextRef.current.length); + const separatorIndex = findLastSeparatorIndex(remainingText); + if (separatorIndex !== -1) { + const chunkText = remainingText.slice(0, separatorIndex + 1); + processedTextRef.current += chunkText; + return chunkText.trim(); + } + return ''; + }; + + const announceNextInQueue = useCallback(() => { + if (politeQueueRef.current.length > 0 && !isAnnouncingRef.current) { + isAnnouncingRef.current = true; + const nextAnnouncement = politeQueueRef.current.shift(); + if (nextAnnouncement) { + setAnnouncePoliteMessage(nextAnnouncement.message); + setPoliteMessageId(nextAnnouncement.id); + setTimeout(() => { + isAnnouncingRef.current = false; + announceNextInQueue(); + }, 100); + } + } + }, []); + + const announcePolite = useCallback( + ({ message, id, isStream = false, isComplete = false }: AnnounceOptions) => { + const announcementId = id ?? generateUniqueId('polite'); + if (isStream) { + const chunk = processChunks(message, politeProcessedTextRef); + if (chunk) { + politeQueueRef.current.push({ message: chunk, id: announcementId }); + announceNextInQueue(); + } + } else if (isComplete) { + const remainingText = message.slice(politeProcessedTextRef.current.length); + if (remainingText.trim()) { + politeQueueRef.current.push({ message: remainingText.trim(), id: announcementId }); + announceNextInQueue(); + } + politeProcessedTextRef.current = ''; + } else { + politeQueueRef.current.push({ message, id: announcementId }); + announceNextInQueue(); + politeProcessedTextRef.current = ''; + } + }, + [announceNextInQueue], + ); + + const announceAssertive = useCallback(({ message, id }: AnnounceOptions) => { + const announcementId = id ??
generateUniqueId('assertive'); + setAnnounceAssertiveMessage(message); + setAssertiveMessageId(announcementId); + }, []); + + const contextValue = { + announcePolite, + announceAssertive, + }; + + useEffect(() => { + return () => { + politeQueueRef.current = []; + isAnnouncingRef.current = false; + }; + }, []); + + return ( + <AnnouncerContext.Provider value={contextValue}> + {children} + <Announcer + politeMessage={announcePoliteMessage} + politeMessageId={politeMessageId} + assertiveMessage={announceAssertiveMessage} + assertiveMessageId={assertiveMessageId} + /> + </AnnouncerContext.Provider> + ); +}; + +export default LiveAnnouncer; diff --git a/client/src/a11y/LiveMessage.tsx b/client/src/a11y/LiveMessage.tsx new file mode 100644 index 00000000000..b773deae53d --- /dev/null +++ b/client/src/a11y/LiveMessage.tsx @@ -0,0 +1,37 @@ +import React, { useEffect, useContext } from 'react'; +import AnnouncerContext from '~/Providers/AnnouncerContext'; + +interface LiveMessageProps { + message: string; + 'aria-live': 'polite' | 'assertive'; + clearOnUnmount?: boolean | 'true' | 'false'; +} + +const LiveMessage: React.FC<LiveMessageProps> = ({ + message, + 'aria-live': ariaLive, + clearOnUnmount, +}) => { + const { announceAssertive, announcePolite } = useContext(AnnouncerContext); + + useEffect(() => { + if (ariaLive === 'assertive') { + announceAssertive(message); + } else if (ariaLive === 'polite') { + announcePolite(message); + } + }, [message, ariaLive, announceAssertive, announcePolite]); + + useEffect(() => { + return () => { + if (clearOnUnmount === true || clearOnUnmount === 'true') { + announceAssertive(''); + announcePolite(''); + } + }; + }, [clearOnUnmount, announceAssertive, announcePolite]); + + return null; +}; + +export default LiveMessage; diff --git a/client/src/a11y/LiveMessenger.tsx b/client/src/a11y/LiveMessenger.tsx new file mode 100644 index 00000000000..3dd82232aa8 --- /dev/null +++ b/client/src/a11y/LiveMessenger.tsx @@ -0,0 +1,12 @@ +import React from 'react'; +import AnnouncerContext from '~/Providers/AnnouncerContext'; + +interface LiveMessengerProps { + children: (context: React.ContextType<typeof AnnouncerContext>) => React.ReactNode; +} + +const LiveMessenger: React.FC<LiveMessengerProps> = ({ children }) => ( + <AnnouncerContext.Consumer>{(contextProps) => children(contextProps)}</AnnouncerContext.Consumer> +); + +export default LiveMessenger; diff --git a/client/src/a11y/MessageBlock.tsx b/client/src/a11y/MessageBlock.tsx new file mode 100644 index 00000000000..a99157b6bdc --- /dev/null +++ b/client/src/a11y/MessageBlock.tsx @@ -0,0 +1,26 @@ +import React from 'react'; + +const offScreenStyle: React.CSSProperties = { + border: 0, + clip: 'rect(0 0 0 0)', + height: '1px', + margin: '-1px', + overflow: 'hidden', + whiteSpace: 'nowrap', + padding: 0, + width: '1px', + position: 'absolute', +}; + +interface MessageBlockProps { + message: string; + 'aria-live': 'polite' | 'assertive'; +} + +const MessageBlock: React.FC<MessageBlockProps> = ({ message, 'aria-live': ariaLive }) => ( + <div style={offScreenStyle} aria-live={ariaLive}>
+ {message} + </div>
+); + +export default MessageBlock; diff --git a/client/src/a11y/index.ts b/client/src/a11y/index.ts new file mode 100644 index 00000000000..60cd708b76d --- /dev/null +++ b/client/src/a11y/index.ts @@ -0,0 +1 @@ +export { default as LiveAnnouncer } from './LiveAnnouncer'; diff --git a/client/src/common/types.ts b/client/src/common/types.ts index 3317b634d1a..e79bf2bddfd 100644 --- a/client/src/common/types.ts +++ b/client/src/common/types.ts @@ -19,6 +19,7 @@ import type { TStartupConfig, EModelEndpoint, AssistantsEndpoint, + TMessageContentParts, AuthorizationTypeEnum, TSetOption as SetOption, TokenExchangeMethodEnum, @@ -26,6 +27,22 @@ import type { import type { UseMutationResult } from '@tanstack/react-query'; import type { LucideIcon } from 'lucide-react'; +export enum PromptsEditorMode { + SIMPLE = 'simple', + ADVANCED = 'advanced', +} + +export enum STTEndpoints { + browser = 'browser', + external = 'external', +} + +export enum TTSEndpoints { + browser = 'browser', + edge = 'edge', + external = 'external', +} + export type AudioChunk = { audio: string; isFinal: boolean; @@ -369,6 +386,19 @@ export type Option = Record & { value: string | number | null; }; +export type VoiceOption = { + value: string; + label: string; +}; + +export type TMessageAudio = { + messageId?: string; + content?: TMessageContentParts[] | string; + className?: string; + isLast: boolean; + index: number; +}; + export type OptionWithIcon = Option & { icon?: React.ReactNode }; export type MentionOption = OptionWithIcon & { type: string; diff --git a/client/src/components/Audio/TTS.tsx b/client/src/components/Audio/TTS.tsx new file mode 100644 index 00000000000..0ccad8a0517 --- /dev/null +++ b/client/src/components/Audio/TTS.tsx @@ -0,0 +1,256 @@ +import { useEffect, useMemo } from 'react'; +import { useRecoilValue } from 'recoil'; +import type { TMessageAudio } from '~/common'; +import { useLocalize, useTTSBrowser, useTTSEdge, useTTSExternal } from '~/hooks'; +import { VolumeIcon, VolumeMuteIcon, Spinner } from '~/components/svg'; +import { useToastContext } from '~/Providers/ToastContext'; +import { logger } from '~/utils'; +import store from '~/store'; + +export function BrowserTTS({ isLast, index, messageId, content, className }: TMessageAudio) { + const localize = useLocalize(); + const playbackRate = useRecoilValue(store.playbackRate); + + const { toggleSpeech, isSpeaking, isLoading, audioRef } = useTTSBrowser({ + isLast, + index, + messageId, + content, + }); + + const renderIcon = (size: string) => { + if (isLoading === true) { + return ; + } + + if (isSpeaking === true) { + return ; + } + + return ; + }; + + useEffect(() => { + const messageAudio = document.getElementById(`audio-${messageId}`) as HTMLAudioElement | null; + if (!messageAudio) { + return; + } + if (playbackRate != null && playbackRate > 0 && messageAudio.playbackRate !== playbackRate) { + messageAudio.playbackRate = playbackRate; + } + }, [audioRef, isSpeaking, playbackRate, messageId]); + + logger.log( + 'MessageAudio: audioRef.current?.src, audioRef.current', + audioRef.current?.src, + audioRef.current, + ); + + return ( + <> + +