diff --git a/CHANGELOG.md b/CHANGELOG.md
index 75a0fa0d8c70..0a500e3e8082 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,56 @@
# Changelog
+### [Version 1.36.46](https://github.com/lobehub/lobe-chat/compare/v1.36.45...v1.36.46)
+
+<sup>Released on **2024-12-21**</sup>
+
+#### ♻ Code Refactoring
+
+- **misc**: Refactor client mode upload to match server mode.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Code refactoring
+
+- **misc**: Refactor client mode upload to match server mode, closes [#5111](https://github.com/lobehub/lobe-chat/issues/5111) ([0361ced](https://github.com/lobehub/lobe-chat/commit/0361ced))
+
+</details>
+
+<div align="right">
+
+[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+</div>
+
+
+
+### [Version 1.36.45](https://github.com/lobehub/lobe-chat/compare/v1.36.44...v1.36.45)
+
+<sup>Released on **2024-12-21**</sup>
+
+#### 💄 Styles
+
+- **misc**: Add o1 model in GitHub models.
+
+<br/>
+
+<details>
+<summary><kbd>Improvements and Fixes</kbd></summary>
+
+#### Styles
+
+- **misc**: Add o1 model in GitHub models, closes [#5110](https://github.com/lobehub/lobe-chat/issues/5110) ([91dc5d7](https://github.com/lobehub/lobe-chat/commit/91dc5d7))
+
+</details>
+
+<div align="right">
+
+[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)
+
+</div>
+
+
+
### [Version 1.36.44](https://github.com/lobehub/lobe-chat/compare/v1.36.43...v1.36.44)
Released on **2024-12-21**
diff --git a/changelog/v1.json b/changelog/v1.json
index 1337ee650145..77397b4a0139 100644
--- a/changelog/v1.json
+++ b/changelog/v1.json
@@ -1,4 +1,18 @@
[
+ {
+ "children": {
+ "improvements": ["Refactor client mode upload to match server mode."]
+ },
+ "date": "2024-12-21",
+ "version": "1.36.46"
+ },
+ {
+ "children": {
+ "improvements": ["Add o1 model in GitHub models."]
+ },
+ "date": "2024-12-21",
+ "version": "1.36.45"
+ },
{
"children": {
"improvements": ["Add Gemini flash thinking model."]
diff --git a/docs/self-hosting/advanced/auth/next-auth/logto.mdx b/docs/self-hosting/advanced/auth/next-auth/logto.mdx
index 7c9f73ce6b72..549864b6274d 100644
--- a/docs/self-hosting/advanced/auth/next-auth/logto.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/logto.mdx
@@ -75,10 +75,32 @@ When deploying LobeChat, you need to configure the following environment variabl
Visit [📘 Environment Variables](/docs/self-hosting/environment-variables/auth#logto) for details on related variables.
-
+
+### Troubleshooting
+
+If you run into problems while deploying Logto, check the following common issues:
+
+- `Only roles with the xxx attribute may create roles`:
+  Check your database user's permissions and make sure the user in your Logto database has the `admin` role, which is required to create roles.
+
+- Error executing `logto db seed` on a third-party database such as `Neon`:
+  Try the `logto db seed --encrypt-base-role` command instead.
+
+- Database seeding failed:
+  Try skipping the seeding step with the `--skip-seed` parameter.
+
+- `Error: role xxx already exists`:
+  Delete the existing role from the database.
+
+- Database migration failed after a version upgrade:
+  Try running `npx @logto/cli db alteration deploy $version` (e.g., `npx @logto/cli db alteration deploy 1.22.0`).
+
+- You deploy with Docker and want a one-click upgrade:
+  Run the custom command in the container: `sh -c "npm run cli db seed -- --swe --encrypt-base-role" && npx @logto/cli db alteration deploy $version && npm start`
+
After successful deployment, users will be able to authenticate via Logto and use LobeChat.
diff --git a/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx b/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx
index cab4ee751b51..a97d2031a118 100644
--- a/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx
+++ b/docs/self-hosting/advanced/auth/next-auth/logto.zh-CN.mdx
@@ -72,8 +72,30 @@ tags:
前往 [📘 环境变量](/zh/docs/self-hosting/environment-variables/auth#logto) 可查阅相关变量详情。
-
+
+### 故障排除
+
+若你在部署 Logto 过程中遇到问题,可以参考以下常见问题:
+
+- `Only roles with the xxx attribute may create roles`:
+  请检查你的数据库用户权限,确保你的 Logto 数据库中的用户具有 `admin` 角色,以便创建角色。
+
+- 在第三方数据库(例如 `Neon`)上执行 `logto db seed` 出错:
+  尝试使用 `logto db seed --encrypt-base-role` 命令。
+
+- 数据库播种失败:
+  请尝试使用 `--skip-seed` 参数跳过播种。
+
+- `Error: role xxx already exists`:
+  在数据库中删除已存在的角色即可。
+
+- 版本升级后,数据库迁移失败:
+  请尝试使用 `npx @logto/cli db alteration deploy $version` 命令(例如 `npx @logto/cli db alteration deploy 1.22.0`)。
+
+- 我使用 Docker 部署,希望一键升级:
+  在容器中执行自定义命令:`sh -c "npm run cli db seed -- --swe --encrypt-base-role" && npx @logto/cli db alteration deploy $version && npm start`
+
部署成功后,用户将可以通过 Logto 身份认证并使用 LobeChat。
diff --git a/package.json b/package.json
index da9fcecc5e8f..4eb7bd68fc90 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@lobehub/chat",
- "version": "1.36.44",
+ "version": "1.36.46",
"description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
"keywords": [
"framework",
diff --git a/src/config/modelProviders/github.ts b/src/config/modelProviders/github.ts
index e23b0a22b05c..0c8745d70162 100644
--- a/src/config/modelProviders/github.ts
+++ b/src/config/modelProviders/github.ts
@@ -4,6 +4,16 @@ import { ModelProviderCard } from '@/types/llm';
// https://github.com/marketplace/models
const Github: ModelProviderCard = {
chatModels: [
+ {
+ description: '专注于高级推理和解决复杂问题,包括数学和科学任务。非常适合需要深入上下文理解和代理工作流程的应用程序。',
+ displayName: 'OpenAI o1',
+ enabled: true,
+ functionCall: false,
+ id: 'o1',
+ maxOutput: 100_000,
+ tokens: 200_000,
+ vision: true,
+ },
{
description: '比 o1-preview 更小、更快,成本低80%,在代码生成和小上下文操作方面表现良好。',
displayName: 'OpenAI o1-mini',
diff --git a/src/database/_deprecated/models/file.ts b/src/database/_deprecated/models/file.ts
index f4a3244d8914..1edb2ddcbdb8 100644
--- a/src/database/_deprecated/models/file.ts
+++ b/src/database/_deprecated/models/file.ts
@@ -1,5 +1,6 @@
import { DBModel } from '@/database/_deprecated/core/types/db';
import { DB_File, DB_FileSchema } from '@/database/_deprecated/schemas/files';
+import { clientS3Storage } from '@/services/file/ClientS3';
import { nanoid } from '@/utils/uuid';
import { BaseModel } from '../core';
@@ -20,9 +21,15 @@ class _FileModel extends BaseModel<'files'> {
if (!item) return;
// arrayBuffer to url
- const base64 = Buffer.from(item.data!).toString('base64');
-
- return { ...item, url: `data:${item.fileType};base64,${base64}` };
+    let base64: string;
+    if (!item.data) {
+      // the file body lives in the browser S3 store; url holds `client-s3://<hash>`
+      const hash = (item.url as string).replace('client-s3://', '');
+      base64 = await this.getBase64ByFileHash(hash);
+    } else {
+      base64 = Buffer.from(item.data).toString('base64');
+    }
+
+ return { ...item, base64, url: `data:${item.fileType};base64,${base64}` };
}
async delete(id: string) {
@@ -32,6 +39,13 @@ class _FileModel extends BaseModel<'files'> {
async clear() {
return this.table.clear();
}
+
+ private async getBase64ByFileHash(hash: string) {
+ const fileItem = await clientS3Storage.getObject(hash);
+ if (!fileItem) throw new Error('file not found');
+
+ return Buffer.from(await fileItem.arrayBuffer()).toString('base64');
+ }
}
export const FileModel = new _FileModel();
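
The deprecated client-side `FileModel` now resolves `client-s3://` URLs back into data URLs instead of reading an inline `ArrayBuffer`. A minimal sketch of that lookup, assuming a stored record whose `url` is `client-s3://abc123` and whose `fileType` is `image/png` (both placeholder values):

```ts
import { clientS3Storage } from '@/services/file/ClientS3';

// Placeholder record: the url field carries the content hash instead of inline bytes.
const item = { fileType: 'image/png', url: 'client-s3://abc123' };

const hash = item.url.replace('client-s3://', ''); // 'abc123'
const file = await clientS3Storage.getObject(hash); // File | undefined
if (!file) throw new Error('file not found');

const base64 = Buffer.from(await file.arrayBuffer()).toString('base64');
const dataUrl = `data:${item.fileType};base64,${base64}`; // same shape findById now returns
```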
diff --git a/src/server/routers/lambda/file.ts b/src/server/routers/lambda/file.ts
index a030c1ddf163..f31fc4d1aee8 100644
--- a/src/server/routers/lambda/file.ts
+++ b/src/server/routers/lambda/file.ts
@@ -32,9 +32,7 @@ export const fileRouter = router({
}),
createFile: fileProcedure
- .input(
- UploadFileSchema.omit({ data: true, saveMode: true, url: true }).extend({ url: z.string() }),
- )
+ .input(UploadFileSchema.omit({ url: true }).extend({ url: z.string() }))
.mutation(async ({ ctx, input }) => {
const { isExist } = await ctx.fileModel.checkHash(input.hash!);
diff --git a/src/services/__tests__/upload.test.ts b/src/services/__tests__/upload.test.ts
new file mode 100644
index 000000000000..96c25fac6a92
--- /dev/null
+++ b/src/services/__tests__/upload.test.ts
@@ -0,0 +1,175 @@
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { fileEnv } from '@/config/file';
+import { edgeClient } from '@/libs/trpc/client';
+import { API_ENDPOINTS } from '@/services/_url';
+import { clientS3Storage } from '@/services/file/ClientS3';
+
+import { UPLOAD_NETWORK_ERROR, uploadService } from '../upload';
+
+// Mock dependencies
+vi.mock('@/libs/trpc/client', () => ({
+ edgeClient: {
+ upload: {
+ createS3PreSignedUrl: {
+ mutate: vi.fn(),
+ },
+ },
+ },
+}));
+
+vi.mock('@/services/file/ClientS3', () => ({
+ clientS3Storage: {
+ putObject: vi.fn(),
+ },
+}));
+
+vi.mock('@/utils/uuid', () => ({
+ uuid: () => 'mock-uuid',
+}));
+
+describe('UploadService', () => {
+ const mockFile = new File(['test'], 'test.png', { type: 'image/png' });
+ const mockPreSignUrl = 'https://example.com/presign';
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+ // Mock Date.now
+ vi.spyOn(Date, 'now').mockImplementation(() => 3600000); // 1 hour in milliseconds
+ });
+
+ describe('uploadWithProgress', () => {
+ beforeEach(() => {
+ // Mock XMLHttpRequest
+ const xhrMock = {
+ upload: {
+ addEventListener: vi.fn(),
+ },
+ open: vi.fn(),
+ send: vi.fn(),
+ setRequestHeader: vi.fn(),
+ addEventListener: vi.fn(),
+ status: 200,
+ };
+ global.XMLHttpRequest = vi.fn(() => xhrMock) as any;
+
+ // Mock createS3PreSignedUrl
+ (edgeClient.upload.createS3PreSignedUrl.mutate as any).mockResolvedValue(mockPreSignUrl);
+ });
+
+ it('should upload file successfully with progress', async () => {
+ const onProgress = vi.fn();
+ const xhr = new XMLHttpRequest();
+
+ // Simulate successful upload
+ vi.spyOn(xhr, 'addEventListener').mockImplementation((event, handler) => {
+ if (event === 'load') {
+ // @ts-ignore
+ handler({ target: { status: 200 } });
+ }
+ });
+
+ const result = await uploadService.uploadWithProgress(mockFile, { onProgress });
+
+ expect(result).toEqual({
+ date: '1',
+ dirname: `${fileEnv.NEXT_PUBLIC_S3_FILE_PATH}/1`,
+ filename: 'mock-uuid.png',
+ path: `${fileEnv.NEXT_PUBLIC_S3_FILE_PATH}/1/mock-uuid.png`,
+ });
+ });
+
+ it('should handle network error', async () => {
+ const xhr = new XMLHttpRequest();
+
+ // Simulate network error
+ vi.spyOn(xhr, 'addEventListener').mockImplementation((event, handler) => {
+ if (event === 'error') {
+ Object.assign(xhr, { status: 0 });
+ // @ts-ignore
+ handler({});
+ }
+ });
+
+ await expect(uploadService.uploadWithProgress(mockFile, {})).rejects.toBe(
+ UPLOAD_NETWORK_ERROR,
+ );
+ });
+
+ it('should handle upload error', async () => {
+ const xhr = new XMLHttpRequest();
+
+ // Simulate upload error
+ vi.spyOn(xhr, 'addEventListener').mockImplementation((event, handler) => {
+ if (event === 'load') {
+ Object.assign(xhr, { status: 400, statusText: 'Bad Request' });
+
+ // @ts-ignore
+ handler({});
+ }
+ });
+
+ await expect(uploadService.uploadWithProgress(mockFile, {})).rejects.toBe('Bad Request');
+ });
+ });
+
+ describe('uploadToClientS3', () => {
+ it('should upload file to client S3 successfully', async () => {
+ const hash = 'test-hash';
+ const expectedResult = {
+ date: '1',
+ dirname: '',
+ filename: mockFile.name,
+ path: `client-s3://${hash}`,
+ };
+
+ (clientS3Storage.putObject as any).mockResolvedValue(undefined);
+
+ const result = await uploadService.uploadToClientS3(hash, mockFile);
+
+ expect(clientS3Storage.putObject).toHaveBeenCalledWith(hash, mockFile);
+ expect(result).toEqual(expectedResult);
+ });
+ });
+
+ describe('getImageFileByUrlWithCORS', () => {
+ beforeEach(() => {
+ global.fetch = vi.fn();
+ });
+
+ it('should fetch and create file from URL', async () => {
+ const url = 'https://example.com/image.png';
+ const filename = 'test.png';
+ const mockArrayBuffer = new ArrayBuffer(8);
+
+ (global.fetch as any).mockResolvedValue({
+ arrayBuffer: () => Promise.resolve(mockArrayBuffer),
+ });
+
+ const result = await uploadService.getImageFileByUrlWithCORS(url, filename);
+
+ expect(global.fetch).toHaveBeenCalledWith(API_ENDPOINTS.proxy, {
+ body: url,
+ method: 'POST',
+ });
+ expect(result).toBeInstanceOf(File);
+ expect(result.name).toBe(filename);
+ expect(result.type).toBe('image/png');
+ });
+
+ it('should handle custom file type', async () => {
+ const url = 'https://example.com/image.jpg';
+ const filename = 'test.jpg';
+ const fileType = 'image/jpeg';
+ const mockArrayBuffer = new ArrayBuffer(8);
+
+ (global.fetch as any).mockResolvedValue({
+ arrayBuffer: () => Promise.resolve(mockArrayBuffer),
+ });
+
+ const result = await uploadService.getImageFileByUrlWithCORS(url, filename, fileType);
+
+ expect(result.type).toBe(fileType);
+ });
+ });
+});
diff --git a/src/services/file/ClientS3/index.test.ts b/src/services/file/ClientS3/index.test.ts
new file mode 100644
index 000000000000..266c82078ac4
--- /dev/null
+++ b/src/services/file/ClientS3/index.test.ts
@@ -0,0 +1,115 @@
+import { createStore, del, get, set } from 'idb-keyval';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+
+import { BrowserS3Storage } from './index';
+
+// Mock idb-keyval
+vi.mock('idb-keyval', () => ({
+ createStore: vi.fn(),
+ set: vi.fn(),
+ get: vi.fn(),
+ del: vi.fn(),
+}));
+
+let storage: BrowserS3Storage;
+let mockStore = {};
+
+beforeEach(() => {
+ // Reset all mocks before each test
+ vi.clearAllMocks();
+ mockStore = {};
+ (createStore as any).mockReturnValue(mockStore);
+ storage = new BrowserS3Storage();
+});
+
+describe('BrowserS3Storage', () => {
+ describe('constructor', () => {
+ it('should create store when in browser environment', () => {
+ expect(createStore).toHaveBeenCalledWith('lobechat-local-s3', 'objects');
+ });
+ });
+
+ describe('putObject', () => {
+ it('should successfully put a file object', async () => {
+ const mockFile = new File(['test content'], 'test.txt', { type: 'text/plain' });
+ const mockArrayBuffer = new ArrayBuffer(8);
+ vi.spyOn(mockFile, 'arrayBuffer').mockResolvedValue(mockArrayBuffer);
+ (set as any).mockResolvedValue(undefined);
+
+ await storage.putObject('1-test-key', mockFile);
+
+ expect(set).toHaveBeenCalledWith(
+ '1-test-key',
+ {
+ data: mockArrayBuffer,
+ name: 'test.txt',
+ type: 'text/plain',
+ },
+ mockStore,
+ );
+ });
+
+ it('should throw error when put operation fails', async () => {
+ const mockFile = new File(['test content'], 'test.txt', { type: 'text/plain' });
+ const mockError = new Error('Storage error');
+ (set as any).mockRejectedValue(mockError);
+
+ await expect(storage.putObject('test-key', mockFile)).rejects.toThrow(
+ 'Failed to put file test.txt: Storage error',
+ );
+ });
+ });
+
+ describe('getObject', () => {
+ it('should successfully get a file object', async () => {
+ const mockData = {
+ data: new ArrayBuffer(8),
+ name: 'test.txt',
+ type: 'text/plain',
+ };
+ (get as any).mockResolvedValue(mockData);
+
+ const result = await storage.getObject('test-key');
+
+ expect(result).toBeInstanceOf(File);
+ expect(result?.name).toBe('test.txt');
+ expect(result?.type).toBe('text/plain');
+ });
+
+ it('should return undefined when file not found', async () => {
+ (get as any).mockResolvedValue(undefined);
+
+ const result = await storage.getObject('test-key');
+
+ expect(result).toBeUndefined();
+ });
+
+ it('should throw error when get operation fails', async () => {
+ const mockError = new Error('Storage error');
+ (get as any).mockRejectedValue(mockError);
+
+ await expect(storage.getObject('test-key')).rejects.toThrow(
+ 'Failed to get object (key=test-key): Storage error',
+ );
+ });
+ });
+
+ describe('deleteObject', () => {
+ it('should successfully delete a file object', async () => {
+ (del as any).mockResolvedValue(undefined);
+
+ await storage.deleteObject('test-key2');
+
+ expect(del).toHaveBeenCalledWith('test-key2', {});
+ });
+
+ it('should throw error when delete operation fails', async () => {
+ const mockError = new Error('Storage error');
+ (del as any).mockRejectedValue(mockError);
+
+ await expect(storage.deleteObject('test-key')).rejects.toThrow(
+ 'Failed to delete object (key=test-key): Storage error',
+ );
+ });
+ });
+});
diff --git a/src/services/file/ClientS3/index.ts b/src/services/file/ClientS3/index.ts
new file mode 100644
index 000000000000..94692f0ecb05
--- /dev/null
+++ b/src/services/file/ClientS3/index.ts
@@ -0,0 +1,58 @@
+import { createStore, del, get, set } from 'idb-keyval';
+
+const BROWSER_S3_DB_NAME = 'lobechat-local-s3';
+
+export class BrowserS3Storage {
+ private store;
+
+ constructor() {
+ // skip server-side rendering
+ if (typeof window === 'undefined') return;
+
+ this.store = createStore(BROWSER_S3_DB_NAME, 'objects');
+ }
+
+  /**
+   * Store a file object
+   * @param key file hash
+   * @param file File object
+   */
+  async putObject(key: string, file: File): Promise<void> {
+ try {
+ const data = await file.arrayBuffer();
+ await set(key, { data, name: file.name, type: file.type }, this.store);
+ } catch (e) {
+ throw new Error(`Failed to put file ${file.name}: ${(e as Error).message}`);
+ }
+ }
+
+  /**
+   * Retrieve a file object
+   * @param key file hash
+   * @returns the stored File, or undefined if the key does not exist
+   */
+  async getObject(key: string): Promise<File | undefined> {
+ try {
+ const res = await get<{ data: ArrayBuffer; name: string; type: string }>(key, this.store);
+ if (!res) return;
+
+      return new File([res.data], res.name, { type: res.type });
+ } catch (e) {
+ throw new Error(`Failed to get object (key=${key}): ${(e as Error).message}`);
+ }
+ }
+
+  /**
+   * Delete a file object
+   * @param key file hash
+   */
+  async deleteObject(key: string): Promise<void> {
+ try {
+ await del(key, this.store);
+ } catch (e) {
+ throw new Error(`Failed to delete object (key=${key}): ${(e as Error).message}`);
+ }
+ }
+}
+
+export const clientS3Storage = new BrowserS3Storage();
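
A short usage sketch for the browser-side store added above; the `hash` value is a stand-in for the content hash that callers derive from the file bytes:

```ts
import { clientS3Storage } from '@/services/file/ClientS3';

const file = new File(['hello'], 'hello.txt', { type: 'text/plain' });
const hash = 'example-hash'; // placeholder; real callers key objects by the file's content hash

await clientS3Storage.putObject(hash, file); // persists { data, name, type } in IndexedDB
const restored = await clientS3Storage.getObject(hash); // rebuilds a File with the original name/type
await clientS3Storage.deleteObject(hash); // removes the entry again
```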
diff --git a/src/services/file/client.test.ts b/src/services/file/client.test.ts
index 636db762c4cf..44850d3f2f13 100644
--- a/src/services/file/client.test.ts
+++ b/src/services/file/client.test.ts
@@ -3,6 +3,7 @@ import { Mock, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest';
import { fileEnv } from '@/config/file';
import { FileModel } from '@/database/_deprecated/models/file';
import { DB_File } from '@/database/_deprecated/schemas/files';
+import { clientS3Storage } from '@/services/file/ClientS3';
import { serverConfigSelectors } from '@/store/serverConfig/selectors';
import { createServerConfigStore } from '@/store/serverConfig/store';
@@ -45,19 +46,23 @@ beforeEach(() => {
describe('FileService', () => {
it('createFile should save the file to the database', async () => {
- const localFile: DB_File = {
+ const localFile = {
name: 'test',
- data: new ArrayBuffer(1),
fileType: 'image/png',
- saveMode: 'local',
+ url: 'client-s3://123',
size: 1,
+ hash: '123',
};
+ await clientS3Storage.putObject(
+ '123',
+ new File([new ArrayBuffer(1)], 'test.png', { type: 'image/png' }),
+ );
+
(FileModel.create as Mock).mockResolvedValue(localFile);
const result = await fileService.createFile(localFile);
- expect(FileModel.create).toHaveBeenCalledWith(localFile);
expect(result).toEqual({ url: 'data:image/png;base64,AA==' });
});
diff --git a/src/services/file/client.ts b/src/services/file/client.ts
index babfa78322c3..6f9ef9e968a5 100644
--- a/src/services/file/client.ts
+++ b/src/services/file/client.ts
@@ -1,16 +1,27 @@
import { FileModel } from '@/database/_deprecated/models/file';
-import { DB_File } from '@/database/_deprecated/schemas/files';
-import { FileItem } from '@/types/files';
+import { clientS3Storage } from '@/services/file/ClientS3';
+import { FileItem, UploadFileParams } from '@/types/files';
import { IFileService } from './type';
export class ClientService implements IFileService {
- async createFile(file: DB_File) {
+ async createFile(file: UploadFileParams) {
// save to local storage
// we may want to save to a remote server later
- const res = await FileModel.create(file);
- // arrayBuffer to url
- const base64 = Buffer.from(file.data!).toString('base64');
+ const res = await FileModel.create({
+ createdAt: Date.now(),
+ data: undefined,
+ fileHash: file.hash,
+ fileType: file.fileType,
+ metadata: file.metadata,
+ name: file.name,
+ saveMode: 'url',
+ size: file.size,
+ url: file.url,
+ } as any);
+
+ // get file to base64 url
+ const base64 = await this.getBase64ByFileHash(file.hash!);
return {
id: res.id,
@@ -18,14 +29,24 @@ export class ClientService implements IFileService {
};
}
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ async checkFileHash(_hash: string) {
+ return { isExist: false, metadata: {} };
+ }
+
  async getFile(id: string): Promise<FileItem> {
const item = await FileModel.findById(id);
if (!item) {
throw new Error('file not found');
}
- // arrayBuffer to url
- const url = URL.createObjectURL(new Blob([item.data!], { type: item.fileType }));
+ // arrayBuffer to blob or base64 to blob
+ const blob = !!item.data
+ ? new Blob([item.data!], { type: item.fileType })
+ : // @ts-ignore
+ new Blob([Buffer.from(item.base64!, 'base64')], { type: item.fileType });
+
+ const url = URL.createObjectURL(blob);
return {
createdAt: new Date(item.createdAt),
@@ -49,4 +70,11 @@ export class ClientService implements IFileService {
async removeAllFiles() {
return FileModel.clear();
}
+
+ private async getBase64ByFileHash(hash: string) {
+ const fileItem = await clientS3Storage.getObject(hash);
+ if (!fileItem) throw new Error('file not found');
+
+ return Buffer.from(await fileItem.arrayBuffer()).toString('base64');
+ }
}
diff --git a/src/services/upload.ts b/src/services/upload.ts
index 38f6cdd2959b..64fec769ecad 100644
--- a/src/services/upload.ts
+++ b/src/services/upload.ts
@@ -1,7 +1,8 @@
import { fileEnv } from '@/config/file';
import { edgeClient } from '@/libs/trpc/client';
import { API_ENDPOINTS } from '@/services/_url';
-import { FileMetadata, UploadFileParams } from '@/types/files';
+import { clientS3Storage } from '@/services/file/ClientS3';
+import { FileMetadata } from '@/types/files';
import { FileUploadState, FileUploadStatus } from '@/types/files/upload';
import { uuid } from '@/utils/uuid';
@@ -66,23 +67,14 @@ class UploadService {
return result;
};
- uploadToClientDB = async (params: UploadFileParams, file: File) => {
- const { FileModel } = await import('@/database/_deprecated/models/file');
- const fileArrayBuffer = await file.arrayBuffer();
-
- // save to local storage
- // we may want to save to a remote server later
- const res = await FileModel.create({
- createdAt: Date.now(),
- ...params,
- data: fileArrayBuffer,
- });
- // arrayBuffer to url
- const base64 = Buffer.from(fileArrayBuffer).toString('base64');
+  uploadToClientS3 = async (hash: string, file: File): Promise<FileMetadata> => {
+ await clientS3Storage.putObject(hash, file);
return {
- id: res.id,
- url: `data:${params.fileType};base64,${base64}`,
+ date: (Date.now() / 1000 / 60 / 60).toFixed(0),
+ dirname: '',
+ filename: file.name,
+ path: `client-s3://${hash}`,
};
};
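
For reference, the client-side branch now resolves to the same `FileMetadata` shape as a presigned-URL upload. A sketch of the value `uploadToClientS3('abc123', file)` produces for a file named `test.png`, assuming a clock at roughly one hour past the Unix epoch (as in the new tests):

```ts
import type { FileMetadata } from '@/types/files';

const metadata: FileMetadata = {
  date: '1', // hours since the epoch, stringified
  dirname: '', // the browser store has no directory hierarchy
  filename: 'test.png',
  path: 'client-s3://abc123', // mapped back to the hash when the file is read
};
```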
diff --git a/src/store/chat/slices/builtinTool/action.test.ts b/src/store/chat/slices/builtinTool/action.test.ts
index 795806b44195..63bc5b990a55 100644
--- a/src/store/chat/slices/builtinTool/action.test.ts
+++ b/src/store/chat/slices/builtinTool/action.test.ts
@@ -2,6 +2,8 @@ import { act, renderHook } from '@testing-library/react';
import { describe, expect, it, vi } from 'vitest';
import { fileService } from '@/services/file';
+import { ClientService } from '@/services/file/client';
+import { messageService } from '@/services/message';
import { imageGenerationService } from '@/services/textToImage';
import { uploadService } from '@/services/upload';
import { chatSelectors } from '@/store/chat/selectors';
@@ -39,17 +41,23 @@ describe('chatToolSlice', () => {
vi.spyOn(uploadService, 'getImageFileByUrlWithCORS').mockResolvedValue(
new File(['1'], 'file.png', { type: 'image/png' }),
);
- vi.spyOn(uploadService, 'uploadToClientDB').mockResolvedValue({} as any);
- vi.spyOn(fileService, 'createFile').mockResolvedValue({ id: mockId, url: '' });
+ vi.spyOn(uploadService, 'uploadToClientS3').mockResolvedValue({} as any);
+ vi.spyOn(ClientService.prototype, 'createFile').mockResolvedValue({
+ id: mockId,
+ url: '',
+ });
vi.spyOn(result.current, 'toggleDallEImageLoading');
+ vi.spyOn(ClientService.prototype, 'checkFileHash').mockImplementation(async () => ({
+ isExist: false,
+ metadata: {},
+ }));
await act(async () => {
await result.current.generateImageFromPrompts(prompts, messageId);
});
// For each prompt, loading is toggled on and then off
expect(imageGenerationService.generateImage).toHaveBeenCalledTimes(prompts.length);
- expect(uploadService.uploadToClientDB).toHaveBeenCalledTimes(prompts.length);
-
+ expect(uploadService.uploadToClientS3).toHaveBeenCalledTimes(prompts.length);
expect(result.current.toggleDallEImageLoading).toHaveBeenCalledTimes(prompts.length * 2);
});
});
@@ -75,6 +83,7 @@ describe('chatToolSlice', () => {
content: initialMessageContent,
}) as ChatMessage,
);
+ vi.spyOn(messageService, 'updateMessage').mockResolvedValueOnce(undefined);
await act(async () => {
await result.current.updateImageItem(messageId, updateFunction);
diff --git a/src/store/file/slices/upload/action.ts b/src/store/file/slices/upload/action.ts
index 1134d7907f43..3f73de77b08b 100644
--- a/src/store/file/slices/upload/action.ts
+++ b/src/store/file/slices/upload/action.ts
@@ -6,14 +6,11 @@ import { message } from '@/components/AntdStaticMethods';
import { LOBE_CHAT_CLOUD } from '@/const/branding';
import { isServerMode } from '@/const/version';
import { fileService } from '@/services/file';
-import { ServerService } from '@/services/file/server';
import { uploadService } from '@/services/upload';
import { FileMetadata, UploadFileItem } from '@/types/files';
import { FileStore } from '../../store';
-const serverFileService = new ServerService();
-
interface UploadWithProgressParams {
file: File;
knowledgeBaseId?: string;
@@ -43,10 +40,6 @@ interface UploadWithProgressResult {
}
export interface FileUploadAction {
- internal_uploadToClientDB: (
-    params: Omit<UploadWithProgressParams, 'knowledgeBaseId'>,
-  ) => Promise<UploadWithProgressResult | undefined>;
-  internal_uploadToServer: (params: UploadWithProgressParams) => Promise<UploadWithProgressResult | undefined>;
uploadWithProgress: (
params: UploadWithProgressParams,
  ) => Promise<UploadWithProgressResult | undefined>;
@@ -57,51 +50,14 @@ export const createFileUploadSlice: StateCreator<
[['zustand/devtools', never]],
[],
FileUploadAction
-> = (set, get) => ({
- internal_uploadToClientDB: async ({ file, onStatusUpdate, skipCheckFileType }) => {
- if (!skipCheckFileType && !file.type.startsWith('image')) {
- onStatusUpdate?.({ id: file.name, type: 'removeFile' });
- message.info({
- content: t('upload.fileOnlySupportInServerMode', {
- cloud: LOBE_CHAT_CLOUD,
- ext: file.name.split('.').pop(),
- ns: 'error',
- }),
- duration: 5,
- });
- return;
- }
-
- const fileArrayBuffer = await file.arrayBuffer();
-
- const hash = sha256(fileArrayBuffer);
-
- const data = await uploadService.uploadToClientDB(
- { fileType: file.type, hash, name: file.name, saveMode: 'local', size: file.size },
- file,
- );
-
- onStatusUpdate?.({
- id: file.name,
- type: 'updateFile',
- value: {
- fileUrl: data.url,
- id: data.id,
- status: 'success',
- uploadState: { progress: 100, restTime: 0, speed: 0 },
- },
- });
-
- return data;
- },
-
- internal_uploadToServer: async ({ file, onStatusUpdate, knowledgeBaseId }) => {
+> = () => ({
+ uploadWithProgress: async ({ file, onStatusUpdate, knowledgeBaseId, skipCheckFileType }) => {
const fileArrayBuffer = await file.arrayBuffer();
// 1. check file hash
const hash = sha256(fileArrayBuffer);
- const checkStatus = await serverFileService.checkFileHash(hash);
+ const checkStatus = await fileService.checkFileHash(hash);
let metadata: FileMetadata;
// 2. if file exist, just skip upload
@@ -112,17 +68,37 @@ export const createFileUploadSlice: StateCreator<
type: 'updateFile',
value: { status: 'processing', uploadState: { progress: 100, restTime: 0, speed: 0 } },
});
- } else {
- // 2. if file don't exist, need upload files
- metadata = await uploadService.uploadWithProgress(file, {
- onProgress: (status, upload) => {
- onStatusUpdate?.({
- id: file.name,
- type: 'updateFile',
- value: { status: status === 'success' ? 'processing' : status, uploadState: upload },
+ }
+ // 2. if file don't exist, need upload files
+ else {
+ // if is server mode, upload to server s3, or upload to client s3
+ if (isServerMode) {
+ metadata = await uploadService.uploadWithProgress(file, {
+ onProgress: (status, upload) => {
+ onStatusUpdate?.({
+ id: file.name,
+ type: 'updateFile',
+ value: { status: status === 'success' ? 'processing' : status, uploadState: upload },
+ });
+ },
+ });
+ } else {
+ if (!skipCheckFileType && !file.type.startsWith('image')) {
+ onStatusUpdate?.({ id: file.name, type: 'removeFile' });
+ message.info({
+ content: t('upload.fileOnlySupportInServerMode', {
+ cloud: LOBE_CHAT_CLOUD,
+ ext: file.name.split('.').pop(),
+ ns: 'error',
+ }),
+ duration: 5,
});
- },
- });
+ return;
+ }
+
+ // Upload to the indexeddb in the browser
+ metadata = await uploadService.uploadToClientS3(hash, file);
+ }
}
// 3. use more powerful file type detector to get file type
@@ -138,12 +114,10 @@ export const createFileUploadSlice: StateCreator<
// 4. create file to db
const data = await fileService.createFile(
{
- createdAt: Date.now(),
fileType,
hash,
metadata,
name: file.name,
- saveMode: 'url',
size: file.size,
url: metadata.path,
},
@@ -163,12 +137,4 @@ export const createFileUploadSlice: StateCreator<
return data;
},
-
- uploadWithProgress: async (payload) => {
- const { internal_uploadToServer, internal_uploadToClientDB } = get();
-
- if (isServerMode) return internal_uploadToServer(payload);
-
- return internal_uploadToClientDB(payload);
- },
});
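
A condensed, hypothetical walk-through of the client-mode path above for a file that has not been seen before; `computeSha256` is a stand-in for the slice's real hash helper, which is outside this diff:

```ts
import { fileService } from '@/services/file';
import { uploadService } from '@/services/upload';

declare const file: File; // an image picked by the user
declare function computeSha256(buffer: ArrayBuffer): string; // hypothetical stand-in

const hash = computeSha256(await file.arrayBuffer());
const { isExist } = await fileService.checkFileHash(hash); // ClientService always reports a miss

if (!isExist) {
  // Bytes go into IndexedDB; metadata.path becomes `client-s3://${hash}`.
  const metadata = await uploadService.uploadToClientS3(hash, file);

  // The slice runs a stronger file-type detector before this call; file.type is a simplification.
  await fileService.createFile({
    fileType: file.type,
    hash,
    metadata,
    name: file.name,
    size: file.size,
    url: metadata.path,
  });
}
```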
diff --git a/src/types/files/upload.ts b/src/types/files/upload.ts
index 83aa3d5be9ce..b94b69a5fdee 100644
--- a/src/types/files/upload.ts
+++ b/src/types/files/upload.ts
@@ -53,7 +53,6 @@ export const FileMetadataSchema = z.object({
export type FileMetadata = z.infer<typeof FileMetadataSchema>;
export const UploadFileSchema = z.object({
- data: z.instanceof(ArrayBuffer).optional(),
/**
* file type
* @example 'image/png'
@@ -77,7 +76,6 @@ export const UploadFileSchema = z.object({
* local mean save the raw file into data
* url mean upload the file to a cdn and then save the url
*/
- saveMode: z.enum(['local', 'url']),
/**
* file size
*/
@@ -89,3 +87,11 @@ export const UploadFileSchema = z.object({
});
export type UploadFileParams = z.infer<typeof UploadFileSchema>;
+
+export interface CheckFileHashResult {
+ fileType?: string;
+ isExist: boolean;
+ metadata?: unknown;
+ size?: number;
+ url?: string;
+}
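
Two illustrative values for the new `CheckFileHashResult` contract: on a hit the server-side service can hand back full metadata so the upload is skipped, while the client-side `checkFileHash` above always reports a miss. The concrete metadata values below are made up for the example:

```ts
import { CheckFileHashResult } from '@/types/files/upload';

// Hit: the upload slice reuses the stored object's metadata and skips the upload.
const hit: CheckFileHashResult = {
  fileType: 'image/png',
  isExist: true,
  metadata: { date: '1', dirname: 'files/1', filename: 'a.png', path: 'files/1/a.png' },
  size: 1024,
  url: 'files/1/a.png',
};

// Miss: the slice uploads to server S3 or, in client mode, to the browser store.
const miss: CheckFileHashResult = { isExist: false, metadata: {} };
```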