🛠 refactor: Ensure File Deletions, File Naming, and Agent Resource Updates (danny-avila#5928)

* refactor: Improve error logging for file upload and processing functions to prevent verbosity

* refactor: Add uploads directory to Docker Compose to persist file uploads

* refactor: `addAgentResourceFile` to handle edge case of non-existing `tool_resource` array

* refactor: Remove version specification from deploy-compose.yml

* refactor: Prefix filenames with file_id to ensure uniqueness in file uploads

* refactor: Enhance error handling in deleteVectors to log warnings for non-404 errors

* refactor: Limit file search results to top 5 based on relevance score

* 🌍 i18n: Update translation.json with latest translations

---------

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
danny-avila and github-actions[bot] authored Feb 18, 2025
1 parent f0f0913 commit 964a74c
Showing 31 changed files with 262 additions and 102 deletions.
3 changes: 2 additions & 1 deletion api/app/clients/tools/util/fileSearch.js
@@ -112,7 +112,8 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
relevanceScore,
})),
)
.sort((a, b) => b.relevanceScore - a.relevanceScore);
.sort((a, b) => b.relevanceScore - a.relevanceScore)
.slice(0, 5);

const formattedString = formattedResults
.map(
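The hunk above caps the formatted file-search context at the five most relevant matches. A minimal standalone sketch of the same sort-then-slice pattern (the helper name and input shape here are illustrative, not part of the actual change):

// Keep only the K highest-scoring results, best first, without mutating the input.
const topKByRelevance = (results, k = 5) =>
  [...results].sort((a, b) => b.relevanceScore - a.relevanceScore).slice(0, k);

// e.g. topKByRelevance([{ relevanceScore: 0.2 }, { relevanceScore: 0.9 }]) returns
// the items ordered by score, capped at 5.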
16 changes: 14 additions & 2 deletions api/models/Agent.js
@@ -97,11 +97,22 @@ const updateAgent = async (searchParameter, updateData) => {
const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
const searchParameter = { id: agent_id };

// build the update to push or create the file ids set
const fileIdsPath = `tool_resources.${tool_resource}.file_ids`;

await Agent.updateOne(
{
id: agent_id,
[`${fileIdsPath}`]: { $exists: false },
},
{
$set: {
[`${fileIdsPath}`]: [],
},
},
);

const updateData = { $addToSet: { [fileIdsPath]: file_id } };

// return the updated agent or throw if no agent matches
const updatedAgent = await updateAgent(searchParameter, updateData);
if (updatedAgent) {
return updatedAgent;
@@ -290,6 +301,7 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
};

module.exports = {
Agent,
getAgent,
loadAgent,
createAgent,
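For context on the `addAgentResourceFile` hunk above: the extra `updateOne` matches only when `tool_resources.<tool_resource>.file_ids` does not exist yet and seeds it with an empty array, so the subsequent `$addToSet` always targets an array and duplicate file ids are ignored even under concurrent calls (exercised by the new spec below). A rough generic sketch of that two-step pattern for any Mongoose model with a string `id` field (the helper name is illustrative, not the actual LibreChat code):

// Race-tolerant "append to a possibly-missing array field" in two steps.
async function addToSetField(Model, id, path, value) {
  // 1) Create the array only if the path is still missing; no-op otherwise.
  await Model.updateOne({ id, [path]: { $exists: false } }, { $set: { [path]: [] } });
  // 2) $addToSet is idempotent, so concurrent or retried calls cannot insert duplicates.
  return Model.findOneAndUpdate({ id }, { $addToSet: { [path]: value } }, { new: true });
}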
160 changes: 160 additions & 0 deletions api/models/Agent.spec.js
@@ -0,0 +1,160 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { Agent, addAgentResourceFile, removeAgentResourceFiles } = require('./Agent');

describe('Agent Resource File Operations', () => {
let mongoServer;

beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
});

afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
});

beforeEach(async () => {
await Agent.deleteMany({});
});

const createBasicAgent = async () => {
const agentId = `agent_${uuidv4()}`;
const agent = await Agent.create({
id: agentId,
name: 'Test Agent',
provider: 'test',
model: 'test-model',
author: new mongoose.Types.ObjectId(),
});
return agent;
};

test('should handle concurrent file additions', async () => {
const agent = await createBasicAgent();
const fileIds = Array.from({ length: 10 }, () => uuidv4());

// Concurrent additions
const additionPromises = fileIds.map((fileId) =>
addAgentResourceFile({
agent_id: agent.id,
tool_resource: 'test_tool',
file_id: fileId,
}),
);

await Promise.all(additionPromises);

const updatedAgent = await Agent.findOne({ id: agent.id });
expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
expect(updatedAgent.tool_resources.test_tool.file_ids).toHaveLength(10);
expect(new Set(updatedAgent.tool_resources.test_tool.file_ids).size).toBe(10);
});

test('should handle concurrent additions and removals', async () => {
const agent = await createBasicAgent();
const initialFileIds = Array.from({ length: 5 }, () => uuidv4());

await Promise.all(
initialFileIds.map((fileId) =>
addAgentResourceFile({
agent_id: agent.id,
tool_resource: 'test_tool',
file_id: fileId,
}),
),
);

const newFileIds = Array.from({ length: 5 }, () => uuidv4());
const operations = [
...newFileIds.map((fileId) =>
addAgentResourceFile({
agent_id: agent.id,
tool_resource: 'test_tool',
file_id: fileId,
}),
),
...initialFileIds.map((fileId) =>
removeAgentResourceFiles({
agent_id: agent.id,
files: [{ tool_resource: 'test_tool', file_id: fileId }],
}),
),
];

await Promise.all(operations);

const updatedAgent = await Agent.findOne({ id: agent.id });
expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
expect(updatedAgent.tool_resources.test_tool.file_ids).toHaveLength(5);
});

test('should initialize array when adding to non-existent tool resource', async () => {
const agent = await createBasicAgent();
const fileId = uuidv4();

const updatedAgent = await addAgentResourceFile({
agent_id: agent.id,
tool_resource: 'new_tool',
file_id: fileId,
});

expect(updatedAgent.tool_resources.new_tool.file_ids).toBeDefined();
expect(updatedAgent.tool_resources.new_tool.file_ids).toHaveLength(1);
expect(updatedAgent.tool_resources.new_tool.file_ids[0]).toBe(fileId);
});

test('should handle rapid sequential modifications to same tool resource', async () => {
const agent = await createBasicAgent();
const fileId = uuidv4();

for (let i = 0; i < 10; i++) {
await addAgentResourceFile({
agent_id: agent.id,
tool_resource: 'test_tool',
file_id: `${fileId}_${i}`,
});

if (i % 2 === 0) {
await removeAgentResourceFiles({
agent_id: agent.id,
files: [{ tool_resource: 'test_tool', file_id: `${fileId}_${i}` }],
});
}
}

const updatedAgent = await Agent.findOne({ id: agent.id });
expect(updatedAgent.tool_resources.test_tool.file_ids).toBeDefined();
expect(Array.isArray(updatedAgent.tool_resources.test_tool.file_ids)).toBe(true);
});

test('should handle multiple tool resources concurrently', async () => {
const agent = await createBasicAgent();
const toolResources = ['tool1', 'tool2', 'tool3'];
const operations = [];

toolResources.forEach((tool) => {
const fileIds = Array.from({ length: 5 }, () => uuidv4());
fileIds.forEach((fileId) => {
operations.push(
addAgentResourceFile({
agent_id: agent.id,
tool_resource: tool,
file_id: fileId,
}),
);
});
});

await Promise.all(operations);

const updatedAgent = await Agent.findOne({ id: agent.id });
toolResources.forEach((tool) => {
expect(updatedAgent.tool_resources[tool].file_ids).toBeDefined();
expect(updatedAgent.tool_resources[tool].file_ids).toHaveLength(5);
});
});
});
7 changes: 6 additions & 1 deletion api/server/services/Files/Code/crud.js
@@ -2,6 +2,7 @@
const axios = require('axios');
const FormData = require('form-data');
const { getCodeBaseURL } = require('@librechat/agents');
const { logAxiosError } = require('~/utils');

const MAX_FILE_SIZE = 150 * 1024 * 1024;

@@ -78,7 +79,11 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''

return `${fileIdentifier}?entity_id=${entity_id}`;
} catch (error) {
throw new Error(`Error uploading file: ${error.message}`);
logAxiosError({
message: `Error uploading code environment file: ${error.message}`,
error,
});
throw new Error(`Error uploading code environment file: ${error.message}`);
}
}

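`logAxiosError` (imported from `~/utils`) is used here so a failed upload to the code environment logs a concise summary before the error is re-thrown, instead of dumping the full axios error. A hedged sketch of what such a helper typically looks like — an illustration of the pattern, not the actual implementation in `~/utils`:

// Illustrative only: surface the useful parts of an axios failure without
// writing the whole request/response objects into the log output.
function logAxiosErrorSketch({ message, error, logger = console }) {
  const status = error?.response?.status;
  const data = error?.response?.data;
  logger.error(message, {
    status,
    code: error?.code,
    data: typeof data === 'string' ? data.slice(0, 512) : data,
  });
}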
11 changes: 9 additions & 2 deletions api/server/services/Files/Code/process.js
@@ -12,6 +12,7 @@ const {
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { convertImage } = require('~/server/services/Files/images/convert');
const { createFile, getFiles, updateFile } = require('~/models/File');
const { logAxiosError } = require('~/utils');
const { logger } = require('~/config');

/**
@@ -85,7 +86,10 @@ const processCodeOutput = async ({
/** Note: `messageId` & `toolCallId` are not part of file DB schema; message object records associated file ID */
return Object.assign(file, { messageId, toolCallId });
} catch (error) {
logger.error('Error downloading file:', error);
logAxiosError({
message: 'Error downloading code environment file',
error,
});
}
};

@@ -135,7 +139,10 @@ async function getSessionInfo(fileIdentifier, apiKey) {

return response.data.find((file) => file.name.startsWith(path))?.lastModified;
} catch (error) {
logger.error(`Error fetching session info: ${error.message}`, error);
logAxiosError({
message: `Error fetching session info: ${error.message}`,
error,
});
return null;
}
}
9 changes: 8 additions & 1 deletion api/server/services/Files/VectorDB/crud.js
@@ -37,7 +37,14 @@ const deleteVectors = async (req, file) => {
error,
message: 'Error deleting vectors',
});
throw new Error(error.message || 'An error occurred during file deletion.');
if (
error.response &&
error.response.status !== 404 &&
(error.response.status < 200 || error.response.status >= 300)
) {
logger.warn('Error deleting vectors, file will not be deleted');
throw new Error(error.message || 'An error occurred during file deletion.');
}
}
};

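With this hunk, a 404 from the vector store is treated as "already deleted" rather than a fatal error, so the local file record can still be removed; only other failing statuses are re-thrown. A minimal sketch of the same idea around an axios delete call (the endpoint is a placeholder, not the actual RAG API contract):

const axios = require('axios');

// Delete remote vectors, tolerating "not found": a missing upstream entry
// should not block cleanup of the local file record.
async function deleteRemoteVectors(baseURL, fileId) {
  try {
    await axios.delete(`${baseURL}/documents/${fileId}`); // placeholder endpoint
  } catch (error) {
    if (error.response?.status === 404) {
      return; // already gone upstream; treat as success
    }
    throw error; // any other failure should still surface to the caller
  }
}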
7 changes: 3 additions & 4 deletions api/server/services/Files/process.js
@@ -347,8 +347,8 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true })
req.app.locals.imageOutputType
}`;
}

const filepath = await saveBuffer({ userId: req.user.id, fileName: filename, buffer });
const fileName = `${file_id}-${filename}`;
const filepath = await saveBuffer({ userId: req.user.id, fileName, buffer });
return await createFile(
{
user: req.user.id,
@@ -801,8 +801,7 @@ async function saveBase64Image(
{ req, file_id: _file_id, filename: _filename, endpoint, context, resolution = 'high' },
) {
const file_id = _file_id ?? v4();

let filename = _filename;
let filename = `${file_id}-${_filename}`;
const { buffer: inputBuffer, type } = base64ToBuffer(url);
if (!path.extname(_filename)) {
const extension = mime.getExtension(type);
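Both hunks above apply the same naming rule: prefix the stored filename with the upload's `file_id` so two files that share a name (e.g. `report.pdf`) can never collide in storage. A small sketch of the scheme (the helper name is illustrative; `uuid` is already a project dependency):

const { v4: uuidv4 } = require('uuid');

// Unique-per-upload storage name that keeps the original, human-readable
// filename and extension intact.
function buildStorageName(originalName, file_id = uuidv4()) {
  return `${file_id}-${originalName}`; // e.g. "3f2c...-report.pdf"
}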
3 changes: 1 addition & 2 deletions client/src/locales/ar/translation.json
@@ -584,7 +584,6 @@
"com_ui_happy_birthday": "إنه عيد ميلادي الأول!",
"com_ui_host": "مُضيف",
"com_ui_image_gen": "توليد الصور",
"com_ui_import": "استيراد",
"com_ui_import_conversation_error": "حدث خطأ أثناء استيراد محادثاتك",
"com_ui_import_conversation_file_type_error": "نوع الملف غير مدعوم للاستيراد",
"com_ui_import_conversation_info": "استيراد محادثات من ملف JSON",
@@ -716,4 +715,4 @@
"com_ui_zoom": "تكبير",
"com_user_message": "أنت",
"com_warning_resubmit_unsupported": "إعادة إرسال رسالة الذكاء الاصطناعي غير مدعومة لنقطة النهاية هذه"
}
}
3 changes: 1 addition & 2 deletions client/src/locales/de/translation.json
@@ -614,7 +614,6 @@
"com_ui_hide_qr": "QR-Code ausblenden",
"com_ui_host": "Host",
"com_ui_image_gen": "Bildgenerierung",
"com_ui_import": "Importieren",
"com_ui_import_conversation_error": "Beim Importieren Ihrer Konversationen ist ein Fehler aufgetreten",
"com_ui_import_conversation_file_type_error": "Nicht unterstützter Importtyp",
"com_ui_import_conversation_info": "Konversationen aus einer JSON-Datei importieren",
@@ -764,4 +763,4 @@
"com_ui_zoom": "Zoom",
"com_user_message": "Du",
"com_warning_resubmit_unsupported": "Das erneute Senden der KI-Nachricht wird für diesen Endpunkt nicht unterstützt."
}
}