diff --git a/packages/filesystem/auth.test.ts b/packages/filesystem/auth.test.ts index 9dc1b0b43..bf18a4d5d 100644 --- a/packages/filesystem/auth.test.ts +++ b/packages/filesystem/auth.test.ts @@ -68,4 +68,38 @@ describe("AuthVerify", () => { ).resolves.toEqual(["new-access", "new-access", "new-access"]); expect(fetchMock).toHaveBeenCalledTimes(1); }); + + it("concurrent initial token verification should share one auth request and save once", async () => { + vi.useFakeTimers(); + const createSpy = vi.spyOn(chrome.tabs, "create").mockImplementation(() => Promise.resolve({ id: 1 }) as any); + const originalGet = (chrome.tabs as any).get; + (chrome.tabs as any).get = vi.fn().mockRejectedValue(new Error("closed")); + const saveSpy = vi.spyOn(LocalStorageDAO.prototype, "saveValue"); + const fetchMock = vi.fn().mockResolvedValue({ + json: vi.fn().mockResolvedValue({ + code: 0, + data: { + token: { + access_token: "initial-access", + refresh_token: "initial-refresh", + }, + }, + }), + } as unknown as Response); + vi.stubGlobal("fetch", fetchMock); + + try { + const auth = Promise.all([AuthVerify("onedrive"), AuthVerify("onedrive"), AuthVerify("onedrive")]); + await vi.advanceTimersByTimeAsync(1000); + + await expect(auth).resolves.toEqual(["initial-access", "initial-access", "initial-access"]); + expect(createSpy).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(saveSpy).toHaveBeenCalledTimes(1); + expect(saveSpy).toHaveBeenCalledWith(key, expect.objectContaining({ accessToken: "initial-access" })); + } finally { + (chrome.tabs as any).get = originalGet; + vi.useRealTimers(); + } + }); }); diff --git a/packages/filesystem/auth.ts b/packages/filesystem/auth.ts index dd4def31f..43eb2b766 100644 --- a/packages/filesystem/auth.ts +++ b/packages/filesystem/auth.ts @@ -74,6 +74,7 @@ export type Token = { createtime: number; }; const refreshTokenPromises: Partial>> = {}; +const authTokenPromises: Partial>> = {}; function refreshAccessToken( 
netDiskType: NetDiskType, @@ -126,19 +127,30 @@ export async function AuthVerify(netDiskType: NetDiskType, invalid?: boolean) { } // token不存在,或者没有accessToken,重新获取 if (!token || !token.accessToken) { - // 强制重新获取token - await NetDisk(netDiskType); - const resp = await GetNetDiskToken(netDiskType); - if (resp.code !== 0) { - throw new WarpTokenError(new Error(resp.msg)); + if (!authTokenPromises[netDiskType]) { + const authPromise = (async () => { + // 强制重新获取token + await NetDisk(netDiskType); + const resp = await GetNetDiskToken(netDiskType); + if (resp.code !== 0) { + throw new WarpTokenError(new Error(resp.msg)); + } + const newToken = { + accessToken: resp.data.token.access_token, + refreshToken: resp.data.token.refresh_token, + createtime: Date.now(), + }; + await localStorageDAO.saveValue(key, newToken); + return newToken; + })().finally(() => { + if (authTokenPromises[netDiskType] === authPromise) { + delete authTokenPromises[netDiskType]; + } + }); + authTokenPromises[netDiskType] = authPromise; } - token = { - accessToken: resp.data.token.access_token, - refreshToken: resp.data.token.refresh_token, - createtime: Date.now(), - }; + token = await authTokenPromises[netDiskType]; invalid = false; - await localStorageDAO.saveValue(key, token); } // token未过期(一小时内)及有效则保留,不用刷新token const unexpired = Date.now() < token.createtime + 3600000; diff --git a/packages/filesystem/baidu/baidu.test.ts b/packages/filesystem/baidu/baidu.test.ts index 4ca661f89..80ef006db 100644 --- a/packages/filesystem/baidu/baidu.test.ts +++ b/packages/filesystem/baidu/baidu.test.ts @@ -32,4 +32,167 @@ describe("BaiduFileSystem", () => { ); expect(updateDynamicRulesMock).not.toHaveBeenCalled(); }); + + it("create should reject expectedVersion as unsupported", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + + await expect(fs.create("test.txt", { expectedVersion: "version" })).rejects.toMatchObject({ + provider: "baidu", + unsupported: true, + }); + }); + + it("writer should 
reject createOnly when target already exists", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + vi.spyOn(fs, "list").mockResolvedValue([ + { + name: "test.txt", + path: "/apps", + size: 1, + digest: "md5", + createtime: 1, + updatetime: 1, + }, + ]); + + const writer = await fs.create("test.txt", { createOnly: true }); + + await expect(writer.write("content")).rejects.toMatchObject({ + provider: "baidu", + conflict: true, + }); + }); + + it("writer should ask Baidu to fail server-side createOnly collisions", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + vi.spyOn(fs, "list").mockResolvedValue([]); + const requestSpy = vi + .spyOn(fs, "request") + .mockResolvedValueOnce({ errno: 0, uploadid: "upload-id" }) + .mockResolvedValueOnce({ errno: 0 }) + .mockResolvedValueOnce({ errno: 0 }); + + const writer = await fs.create("test.txt", { createOnly: true }); + + await expect(writer.write("content")).resolves.toBeUndefined(); + + expect(String((requestSpy.mock.calls[0][1] as RequestInit).body)).toContain("rtype=0"); + expect(String((requestSpy.mock.calls[2][1] as RequestInit).body)).toContain("rtype=0"); + }); + + it("writer should surface Baidu createOnly rejection as conflict", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + vi.spyOn(fs, "list").mockResolvedValue([]); + vi.spyOn(fs, "request").mockResolvedValueOnce({ errno: -8, errmsg: "file exists" }); + + const writer = await fs.create("test.txt", { createOnly: true }); + + await expect(writer.write("content")).rejects.toMatchObject({ + provider: "baidu", + conflict: true, + }); + }); + + it("writer should reject expectedDigest when remote digest changed", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + vi.spyOn(fs, "list").mockResolvedValue([ + { + name: "test.txt", + path: "/apps", + size: 1, + digest: "new-md5", + createtime: 1, + updatetime: 1, + }, + ]); + + const writer = await fs.create("test.txt", { expectedDigest: "old-md5" }); + + 
await expect(writer.write("content")).rejects.toMatchObject({ + provider: "baidu", + conflict: true, + }); + }); + + it("writer should allow best-effort expectedDigest when remote digest still matches", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + vi.spyOn(fs, "list").mockResolvedValue([ + { + name: "test.txt", + path: "/apps", + size: 1, + digest: "old-md5", + createtime: 1, + updatetime: 1, + }, + ]); + const requestSpy = vi + .spyOn(fs, "request") + .mockResolvedValueOnce({ errno: 0, uploadid: "upload-id" }) + .mockResolvedValueOnce({ errno: 0 }) + .mockResolvedValueOnce({ errno: 0 }); + + const writer = await fs.create("test.txt", { expectedDigest: "old-md5" }); + + await expect(writer.write("content")).resolves.toBeUndefined(); + expect(requestSpy).toHaveBeenCalledTimes(3); + expect(String((requestSpy.mock.calls[0][1] as RequestInit).body)).toContain("rtype=3"); + expect(String((requestSpy.mock.calls[2][1] as RequestInit).body)).toContain("rtype=3"); + }); + + it("delete should be idempotent when Baidu reports file missing", async () => { + const fetchMock = vi.fn().mockResolvedValue({ + json: async () => ({ errno: -9 }), + }); + vi.stubGlobal("fetch", fetchMock); + const fs = new BaiduFileSystem("/apps", "token"); + + await expect(fs.delete("missing.txt")).resolves.toBeUndefined(); + }); + + it("delete should reject expectedVersion as unsupported", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + + await expect(fs.delete("test.txt", { expectedVersion: "version" })).rejects.toMatchObject({ + provider: "baidu", + unsupported: true, + }); + }); + + it("delete should reject expectedDigest when remote digest changed", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + vi.spyOn(fs, "list").mockResolvedValue([ + { + name: "test.txt", + path: "/apps", + size: 1, + digest: "new-md5", + createtime: 1, + updatetime: 1, + }, + ]); + + await expect(fs.delete("test.txt", { expectedDigest: "old-md5" 
})).rejects.toMatchObject({ + provider: "baidu", + conflict: true, + }); + }); + + it("delete should allow best-effort expectedDigest when remote digest still matches", async () => { + const fs = new BaiduFileSystem("/apps", "token"); + vi.spyOn(fs, "list").mockResolvedValue([ + { + name: "test.txt", + path: "/apps", + size: 1, + digest: "old-md5", + createtime: 1, + updatetime: 1, + }, + ]); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({ errno: 0 }); + + await expect(fs.delete("test.txt", { expectedDigest: "old-md5" })).resolves.toBeUndefined(); + expect(requestSpy).toHaveBeenCalledTimes(1); + }); }); diff --git a/packages/filesystem/baidu/baidu.ts b/packages/filesystem/baidu/baidu.ts index 9184de2f0..caaec6040 100644 --- a/packages/filesystem/baidu/baidu.ts +++ b/packages/filesystem/baidu/baidu.ts @@ -1,6 +1,7 @@ import { AuthVerify } from "../auth"; +import { fileConflictError, unsupportedConditionalWriteError } from "../error"; import type FileSystem from "../filesystem"; -import type { FileInfo, FileCreateOptions, FileReader, FileWriter } from "../filesystem"; +import type { FileInfo, FileCreateOptions, FileDeleteOptions, FileReader, FileWriter } from "../filesystem"; import { joinPath } from "../utils"; import { BaiduFileReader, BaiduFileWriter } from "./rw"; @@ -29,8 +30,14 @@ export default class BaiduFileSystem implements FileSystem { return new BaiduFileSystem(joinPath(this.path, path), this.accessToken); } - async create(path: string, _opts?: FileCreateOptions): Promise { - return new BaiduFileWriter(this, joinPath(this.path, path)); + async create(path: string, opts?: FileCreateOptions): Promise { + if (opts?.expectedVersion) { + throw unsupportedConditionalWriteError( + "baidu", + "Baidu filesystem does not expose a version token for conditional writes" + ); + } + return new BaiduFileWriter(this, joinPath(this.path, path), opts); } async createDir(dir: string, _opts?: FileCreateOptions): Promise { @@ -82,23 +89,43 @@ export default 
class BaiduFileSystem implements FileSystem { }); } - delete(path: string): Promise { + async delete(path: string, opts?: FileDeleteOptions): Promise { + if (opts?.expectedVersion) { + throw unsupportedConditionalWriteError( + "baidu", + "Baidu filesystem does not expose a version token for conditional deletes" + ); + } + if (opts?.expectedDigest) { + // 百度网盘删除接口不支持服务端 If-Match/CAS,只能先 list 比对 digest 再删除。 + // 这只能降低 stale 删除风险,不能关闭“检查后、删除前被其他设备更新”的 TOCTOU 窗口。 + // 典型残留窗口:A list 通过后,B 更新同名文件,A 随后 delete 仍可能删除 B 的新内容。 + const targetName = path.substring(path.lastIndexOf("/") + 1); + const existing = (await this.list()).find((file) => file.name === targetName); + if (existing && existing.digest !== opts.expectedDigest) { + throw fileConflictError("baidu", `Baidu file digest changed before delete: ${path}`, { + status: 412, + code: "digestMismatch", + }); + } + } const filelist = [joinPath(this.path, path)]; const myHeaders = new Headers(); myHeaders.append("Content-Type", "application/x-www-form-urlencoded"); - return this.request( + const data = await this.request( `https://pan.baidu.com/rest/2.0/xpan/file?method=filemanager&access_token=${this.accessToken}&opera=delete`, { method: "POST", body: `async=0&filelist=${encodeURIComponent(JSON.stringify(filelist))}`, headers: myHeaders, } - ).then((data) => { - if (data.errno) { - throw new Error(JSON.stringify(data)); + ); + if (data.errno) { + if (data.errno === -9 || data.errno === 12) { + return; } - return data; - }); + throw new Error(JSON.stringify(data)); + } } async list(): Promise { diff --git a/packages/filesystem/baidu/rw.ts b/packages/filesystem/baidu/rw.ts index df2e54519..194934d64 100644 --- a/packages/filesystem/baidu/rw.ts +++ b/packages/filesystem/baidu/rw.ts @@ -1,4 +1,5 @@ -import type { FileInfo, FileReader, FileWriter } from "../filesystem"; +import { fileConflictError } from "../error"; +import type { FileCreateOptions, FileInfo, FileReader, FileWriter } from "../filesystem"; import { calculateMd5, 
md5OfText } from "@App/pkg/utils/crypto"; import type BaiduFileSystem from "./baidu"; @@ -38,9 +39,12 @@ export class BaiduFileWriter implements FileWriter { fs: BaiduFileSystem; - constructor(fs: BaiduFileSystem, path: string) { + opts?: FileCreateOptions; + + constructor(fs: BaiduFileSystem, path: string, opts?: FileCreateOptions) { this.fs = fs; this.path = path; + this.opts = opts; } size(content: string | Blob) { @@ -58,6 +62,8 @@ export class BaiduFileWriter implements FileWriter { } async write(content: string | Blob): Promise { + await this.checkWritePrecondition(); + // 预上传获取id const size = this.size(content).toString(); const md5 = await this.md5(content); @@ -67,7 +73,7 @@ export class BaiduFileWriter implements FileWriter { urlencoded.append("size", size); urlencoded.append("isdir", "0"); urlencoded.append("autoinit", "1"); - urlencoded.append("rtype", "3"); + urlencoded.append("rtype", this.opts?.createOnly ? "0" : "3"); urlencoded.append("block_list", JSON.stringify(blockList)); const myHeaders = new Headers(); myHeaders.append("Content-Type", "application/x-www-form-urlencoded"); @@ -80,6 +86,7 @@ export class BaiduFileWriter implements FileWriter { } ); if (data.errno) { + this.throwCreateOnlyConflict(data); throw new Error(JSON.stringify(data)); } const uploadid = data.uploadid; @@ -102,6 +109,7 @@ export class BaiduFileWriter implements FileWriter { } ); if (data.errno) { + this.throwCreateOnlyConflict(data); throw new Error(JSON.stringify(data)); } // 创建文件 @@ -111,7 +119,7 @@ export class BaiduFileWriter implements FileWriter { urlencoded.append("isdir", "0"); urlencoded.append("block_list", JSON.stringify(blockList)); urlencoded.append("uploadid", uploadid); - urlencoded.append("rtype", "3"); + urlencoded.append("rtype", this.opts?.createOnly ? 
"0" : "3"); data = await this.fs.request( `https://pan.baidu.com/rest/2.0/xpan/file?method=create&access_token=${this.fs.accessToken}`, { @@ -121,7 +129,46 @@ export class BaiduFileWriter implements FileWriter { } ); if (data.errno) { + this.throwCreateOnlyConflict(data); throw new Error(JSON.stringify(data)); } } + + private throwCreateOnlyConflict(data: any): void { + if (!this.opts?.createOnly) { + return; + } + throw fileConflictError("baidu", `File already exists or createOnly write was rejected: ${this.path}`, { + status: 409, + code: String(data.errno), + raw: data, + }); + } + + private async checkWritePrecondition(): Promise { + if (!this.opts?.expectedDigest && !this.opts?.createOnly) { + return; + } + const targetName = this.path.substring(this.path.lastIndexOf("/") + 1); + const existing = (await this.fs.list()).find((file) => file.name === targetName); + + if (this.opts?.createOnly) { + if (existing) { + throw fileConflictError("baidu", `File already exists: ${this.path}`, { + status: 409, + code: "nameAlreadyExists", + }); + } + return; + } + + // 百度网盘没有原子 compare-and-swap 上传能力;这个 digest 检查只是 best-effort。 + // 它只能在上传前发现本地快照已过期;createOnly 仍依赖服务端 rtype=0 来拒绝同名覆盖。 + if (this.opts?.expectedDigest && existing?.digest !== this.opts.expectedDigest) { + throw fileConflictError("baidu", `Baidu file digest changed before write: ${this.path}`, { + status: 412, + code: "digestMismatch", + }); + } + } } diff --git a/packages/filesystem/dropbox/dropbox.test.ts b/packages/filesystem/dropbox/dropbox.test.ts index 43a239127..7fc8b7aa8 100644 --- a/packages/filesystem/dropbox/dropbox.test.ts +++ b/packages/filesystem/dropbox/dropbox.test.ts @@ -15,6 +15,27 @@ describe("DropboxFileSystem", () => { await expect(fs.delete("missing.txt")).resolves.toBeUndefined(); }); + it("delete should check rev before conditional delete", async () => { + const fs = new DropboxFileSystem("/", "token"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValueOnce({ rev: "rev-1" 
}).mockResolvedValueOnce({}); + + await expect(fs.delete("test.txt", { expectedVersion: "rev-1" })).resolves.toBeUndefined(); + + expect(requestSpy).toHaveBeenCalledTimes(2); + expect(requestSpy.mock.calls[0][0]).toBe("https://api.dropboxapi.com/2/files/get_metadata"); + expect(requestSpy.mock.calls[1][0]).toBe("https://api.dropboxapi.com/2/files/delete_v2"); + }); + + it("delete should reject when conditional rev changed", async () => { + const fs = new DropboxFileSystem("/", "token"); + vi.spyOn(fs, "request").mockResolvedValue({ rev: "rev-2" }); + + await expect(fs.delete("test.txt", { expectedVersion: "rev-1" })).rejects.toMatchObject({ + provider: "dropbox", + conflict: true, + }); + }); + it("exists should return false on path not found", async () => { const fs = new DropboxFileSystem("/", "token"); vi.spyOn(fs, "request").mockRejectedValue( @@ -30,4 +51,87 @@ describe("DropboxFileSystem", () => { await expect(fs.exists("/test.txt")).rejects.toThrow("invalid_access_token"); }); + + it("list should expose Dropbox rev as version", async () => { + const fs = new DropboxFileSystem("/", "token"); + vi.spyOn(fs, "request").mockResolvedValue({ + entries: [ + { + ".tag": "file", + name: "test.user.js", + size: 1, + content_hash: "hash-1", + rev: "rev-1", + client_modified: "2024-01-01T00:00:00.000Z", + server_modified: "2024-01-02T00:00:00.000Z", + }, + ], + has_more: false, + }); + + await expect(fs.list()).resolves.toMatchObject([ + { + name: "test.user.js", + digest: "hash-1", + version: "rev-1", + }, + ]); + }); + + it("writer should use Dropbox update mode when expectedVersion is provided", async () => { + const fs = new DropboxFileSystem("/", "token"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({}); + + const writer = await fs.create("test.txt", { expectedVersion: "rev-1" }); + await writer.write("content"); + + const headers = (requestSpy.mock.calls[0][1] as RequestInit).headers as Headers; + 
expect(JSON.parse(headers.get("Dropbox-API-Arg")!)).toEqual({ + path: "/test.txt", + mode: { ".tag": "update", update: "rev-1" }, + autorename: false, + }); + }); + + it("writer should use add mode for createOnly without metadata preflight", async () => { + const fs = new DropboxFileSystem("/", "token"); + const existsSpy = vi.spyOn(fs, "exists"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({}); + + const writer = await fs.create("test.txt", { createOnly: true }); + await writer.write("content"); + + expect(existsSpy).not.toHaveBeenCalled(); + const headers = (requestSpy.mock.calls[0][1] as RequestInit).headers as Headers; + expect(JSON.parse(headers.get("Dropbox-API-Arg")!)).toMatchObject({ + path: "/test.txt", + mode: "add", + }); + }); + + it("writer should use overwrite mode for normal writes without metadata preflight", async () => { + const fs = new DropboxFileSystem("/", "token"); + const existsSpy = vi.spyOn(fs, "exists"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({}); + + const writer = await fs.create("test.txt"); + await writer.write("content"); + + expect(existsSpy).not.toHaveBeenCalled(); + const headers = (requestSpy.mock.calls[0][1] as RequestInit).headers as Headers; + expect(JSON.parse(headers.get("Dropbox-API-Arg")!)).toMatchObject({ + path: "/test.txt", + mode: "overwrite", + }); + }); + + it("writer should reject expectedDigest without Dropbox rev", async () => { + const fs = new DropboxFileSystem("/", "token"); + const writer = await fs.create("test.txt", { expectedDigest: "content-hash" }); + + await expect(writer.write("content")).rejects.toMatchObject({ + provider: "dropbox", + unsupported: true, + }); + }); }); diff --git a/packages/filesystem/dropbox/dropbox.ts b/packages/filesystem/dropbox/dropbox.ts index 46c264322..6c56d9add 100644 --- a/packages/filesystem/dropbox/dropbox.ts +++ b/packages/filesystem/dropbox/dropbox.ts @@ -1,6 +1,7 @@ import { AuthVerify } from "../auth"; +import { 
fileConflictError } from "../error"; import type FileSystem from "../filesystem"; -import type { FileInfo, FileCreateOptions, FileReader, FileWriter } from "../filesystem"; +import type { FileInfo, FileCreateOptions, FileDeleteOptions, FileReader, FileWriter } from "../filesystem"; import { joinPath } from "../utils"; import { DropboxFileReader, DropboxFileWriter } from "./rw"; @@ -37,8 +38,8 @@ export default class DropboxFileSystem implements FileSystem { return Promise.resolve(new DropboxFileSystem(joinPath(this.path, path), this.accessToken)); } - create(path: string, _opts?: FileCreateOptions): Promise { - return Promise.resolve(new DropboxFileWriter(this, joinPath(this.path, path))); + create(path: string, opts?: FileCreateOptions): Promise { + return Promise.resolve(new DropboxFileWriter(this, joinPath(this.path, path), opts)); } async createDir(dir: string, _opts?: FileCreateOptions): Promise { @@ -139,13 +140,14 @@ export default class DropboxFileSystem implements FileSystem { }); } - async delete(path: string): Promise { + async delete(path: string, opts?: FileDeleteOptions): Promise { const fullPath = joinPath(this.path, path); const myHeaders = new Headers(); myHeaders.append("Content-Type", "application/json"); try { + await this.assertDeletePrecondition(fullPath, opts); await this.request("https://api.dropboxapi.com/2/files/delete_v2", { method: "POST", headers: myHeaders, @@ -164,6 +166,32 @@ export default class DropboxFileSystem implements FileSystem { this.clearRelatedCache(fullPath); } + private async assertDeletePrecondition(path: string, opts?: FileDeleteOptions): Promise { + const expected = opts?.expectedVersion || opts?.expectedDigest; + if (!expected) { + return; + } + // Dropbox delete_v2 不接受 rev/content_hash 条件参数,这里只能先读 metadata 再删除。 + // 这不是原子删除:metadata 检查和 delete_v2 之间仍可能被其他设备更新。 + // 典型残留窗口:A get_metadata 通过后,B 更新文件,A 的 delete_v2 仍可能删除 B 的新内容。 + const myHeaders = new Headers(); + myHeaders.append("Content-Type", "application/json"); + 
const metadata = await this.request("https://api.dropboxapi.com/2/files/get_metadata", { + method: "POST", + headers: myHeaders, + body: JSON.stringify({ + path, + }), + }); + const current = opts?.expectedVersion ? metadata.rev : metadata.content_hash; + if (current !== expected) { + throw fileConflictError("dropbox", `Dropbox file changed before delete: ${path}`, { + status: 412, + code: "versionMismatch", + }); + } + } + async list(): Promise { let folderPath = this.path; @@ -207,6 +235,7 @@ export default class DropboxFileSystem implements FileSystem { path: this.path, size: item.size || 0, digest: item.content_hash || "", + version: item.rev || "", createtime: new Date(item.client_modified).getTime(), updatetime: new Date(item.server_modified).getTime(), }); diff --git a/packages/filesystem/dropbox/rw.ts b/packages/filesystem/dropbox/rw.ts index f63bfc2ea..0480bac1a 100644 --- a/packages/filesystem/dropbox/rw.ts +++ b/packages/filesystem/dropbox/rw.ts @@ -1,7 +1,17 @@ +import { FileSystemError, fileConflictError, unsupportedConditionalWriteError } from "../error"; import type { FileInfo, FileReader, FileWriter } from "../filesystem"; +import type { FileCreateOptions } from "../filesystem"; import { joinPath } from "../utils"; import type DropboxFileSystem from "./dropbox"; +function isDropboxUploadConflict(error: unknown): boolean { + if (error instanceof FileSystemError) { + return error.conflict; + } + const message = error instanceof Error ? 
error.message : String(error); + return message.includes("409") || message.includes("conflict") || message.includes("incorrect_offset"); +} + export class DropboxFileReader implements FileReader { file: FileInfo; @@ -53,41 +63,44 @@ export class DropboxFileWriter implements FileWriter { fs: DropboxFileSystem; - constructor(fs: DropboxFileSystem, path: string) { + opts?: FileCreateOptions; + + constructor(fs: DropboxFileSystem, path: string, opts?: FileCreateOptions) { this.fs = fs; this.path = path; + this.opts = opts; } async write(content: string | Blob): Promise { - // 检查文件是否存在 - const exists = await this.fs.exists(this.path); - - if (exists) { - // 如果文件存在,则更新 - return this.updateFile(content); - } else { - // 如果文件不存在,则创建 + if (this.opts?.expectedDigest && !this.opts.expectedVersion) { + throw unsupportedConditionalWriteError( + "dropbox", + "Dropbox conditional writes require expectedVersion (rev), not expectedDigest" + ); + } + if (this.opts?.createOnly) { return this.createNewFile(content); } + if (this.opts?.expectedVersion) { + return this.updateFile(content, this.opts.expectedVersion); + } + + return this.updateFile(content); } - private async updateFile(content: string | Blob): Promise { + private async updateFile(content: string | Blob, rev?: string): Promise { const myHeaders = new Headers(); myHeaders.append("Content-Type", "application/octet-stream"); myHeaders.append( "Dropbox-API-Arg", JSON.stringify({ path: this.path, - mode: "overwrite", + mode: rev ? { ".tag": "update", update: rev } : "overwrite", autorename: false, }) ); - await this.fs.request("https://content.dropboxapi.com/2/files/upload", { - method: "POST", - headers: myHeaders, - body: content instanceof Blob ? 
content : new Blob([content]), - }); + await this.upload(myHeaders, content); return Promise.resolve(); } @@ -104,12 +117,27 @@ export class DropboxFileWriter implements FileWriter { }) ); - await this.fs.request("https://content.dropboxapi.com/2/files/upload", { - method: "POST", - headers: myHeaders, - body: content instanceof Blob ? content : new Blob([content]), - }); + await this.upload(myHeaders, content); return Promise.resolve(); } + + private async upload(headers: Headers, content: string | Blob): Promise { + try { + await this.fs.request("https://content.dropboxapi.com/2/files/upload", { + method: "POST", + headers, + body: content instanceof Blob ? content : new Blob([content]), + }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + if (isDropboxUploadConflict(error)) { + throw fileConflictError("dropbox", message, { + status: message.includes("409") ? 409 : undefined, + raw: error, + }); + } + throw error; + } + } } diff --git a/packages/filesystem/error.ts b/packages/filesystem/error.ts index f6bcd1f53..30825f6b5 100644 --- a/packages/filesystem/error.ts +++ b/packages/filesystem/error.ts @@ -34,6 +34,7 @@ export type FileSystemErrorOptions = { auth?: boolean; notFound?: boolean; rateLimit?: boolean; + unsupported?: boolean; raw?: unknown; }; @@ -54,6 +55,8 @@ export class FileSystemError extends Error { rateLimit: boolean; + unsupported: boolean; + raw?: unknown; constructor(options: FileSystemErrorOptions) { @@ -67,10 +70,33 @@ export class FileSystemError extends Error { this.auth = options.auth ?? false; this.notFound = options.notFound ?? false; this.rateLimit = options.rateLimit ?? false; + this.unsupported = options.unsupported ?? 
false; this.raw = options.raw; } } +export function fileConflictError( + provider: FileSystemProvider, + message: string, + options: Omit = {} +): FileSystemError { + return new FileSystemError({ + ...options, + provider, + message, + conflict: true, + }); +} + +export function unsupportedConditionalWriteError(provider: FileSystemProvider, message: string): FileSystemError { + return new FileSystemError({ + provider, + message, + code: "unsupported_conditional_write", + unsupported: true, + }); +} + export function isNotFoundError(error: unknown): error is FileSystemError { return error instanceof FileSystemError && error.notFound; } diff --git a/packages/filesystem/filesystem.ts b/packages/filesystem/filesystem.ts index be40308f0..bfe8ba878 100644 --- a/packages/filesystem/filesystem.ts +++ b/packages/filesystem/filesystem.ts @@ -8,6 +8,8 @@ export interface FileInfo { size: number; // 文件摘要 digest: string; + // Provider-specific write precondition token, such as rev/etag/version. + version?: string; // 文件创建时间 createtime: number; // 文件修改时间 @@ -29,6 +31,14 @@ export type FileReadWriter = FileReader & FileWriter; export type FileCreateOptions = { modifiedDate?: number; + expectedDigest?: string; + expectedVersion?: string; + createOnly?: boolean; +}; + +export type FileDeleteOptions = { + expectedDigest?: string; + expectedVersion?: string; }; // 文件读取 @@ -44,7 +54,7 @@ export default interface FileSystem { // 创建目录 createDir(dir: string, opts?: FileCreateOptions): Promise; // 删除文件 - delete(path: string): Promise; + delete(path: string, opts?: FileDeleteOptions): Promise; // 文件列表 list(): Promise; // getDirUrl 获取目录的url diff --git a/packages/filesystem/googledrive/googledrive.test.ts b/packages/filesystem/googledrive/googledrive.test.ts index 8f0de5859..faab8447e 100644 --- a/packages/filesystem/googledrive/googledrive.test.ts +++ b/packages/filesystem/googledrive/googledrive.test.ts @@ -48,6 +48,45 @@ describe("GoogleDriveFileSystem", () => { await 
expect(fs.delete("missing.txt")).resolves.toBeUndefined(); }); + it("delete should check version before conditional delete", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + const getFileIdSpy = vi.spyOn(fs, "getFileId"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValueOnce({ version: "2" }).mockResolvedValueOnce({}); + + await expect(fs.delete("test.txt", { expectedVersion: "file-1:2" })).resolves.toBeUndefined(); + + expect(getFileIdSpy).not.toHaveBeenCalled(); + expect(requestSpy.mock.calls[0][0]).toContain("/drive/v3/files/file-1?fields=version,md5Checksum"); + expect(requestSpy.mock.calls[1][0]).toContain("/drive/v3/files/file-1?spaces=appDataFolder"); + }); + + it("delete should reject when conditional digest changed", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + vi.spyOn(fs, "getFileId").mockResolvedValue("file-1"); + vi.spyOn(fs, "request").mockResolvedValue({ md5Checksum: "new-md5" }); + + await expect(fs.delete("test.txt", { expectedDigest: "old-md5" })).rejects.toMatchObject({ + provider: "googledrive", + conflict: true, + }); + }); + + it("delete should clear stale cached id on 404 response", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + (fs as any).pathToIdCache.set("/missing.txt", "stale-file-id"); + const notFoundError = new FileSystemError({ + provider: "googledrive", + message: "File not found", + status: 404, + notFound: true, + }); + vi.spyOn(fs, "request").mockRejectedValue(notFoundError); + + await expect(fs.delete("missing.txt")).resolves.toBeUndefined(); + + expect((fs as any).pathToIdCache.has("/missing.txt")).toBe(false); + }); + it("ensureDirExists should create missing nested directories and return final id", async () => { const fs = new GoogleDriveFileSystem("/", "token"); const findSpy = vi.spyOn(fs, "findFolderByName").mockResolvedValue(null); @@ -135,6 +174,103 @@ describe("GoogleDriveFileSystem", () => { expect(findFileSpy).toHaveBeenCalledTimes(1); 
}); + it("writer should best-effort validate expected Google Drive version before update", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + const writer = await fs.create("file.txt", { + expectedVersion: "file-1:version-7", + }); + const findSpy = vi.spyOn(fs, "findFileInDirectory"); + const requestSpy = vi + .spyOn(fs, "request") + .mockResolvedValueOnce({ version: "version-7" }) + .mockResolvedValueOnce({}); + + await expect(writer.write("content")).resolves.toBeUndefined(); + + expect(findSpy).not.toHaveBeenCalled(); + expect(requestSpy).toHaveBeenCalledTimes(2); + expect(requestSpy.mock.calls[0][0]).toBe( + "https://www.googleapis.com/drive/v3/files/file-1?fields=version&spaces=appDataFolder" + ); + expect(requestSpy.mock.calls[1][0]).toBe( + "https://www.googleapis.com/upload/drive/v3/files/file-1?uploadType=multipart&spaces=appDataFolder" + ); + expect((requestSpy.mock.calls[1][1] as RequestInit).headers).toBeUndefined(); + }); + + it("writer should reject update when Google Drive version changed", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + const writer = await fs.create("file.txt", { + expectedVersion: "file-1:version-7", + }); + vi.spyOn(fs, "request").mockResolvedValueOnce({ version: "version-8" }); + + await expect(writer.write("content")).rejects.toMatchObject({ + provider: "googledrive", + conflict: true, + status: 412, + }); + }); + + it("writer should reject createOnly when target already exists", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + const writer = await fs.create("file.txt", { createOnly: true }); + vi.spyOn(fs, "findFileInDirectory").mockResolvedValue("file-1"); + + await expect(writer.write("content")).rejects.toMatchObject({ + provider: "googledrive", + conflict: true, + }); + }); + + it("findFileInDirectory should reject duplicate file names", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + vi.spyOn(fs, 
"findFilesInDirectory").mockResolvedValue([{ id: "file-1" }, { id: "file-2" }]); + + await expect(fs.findFileInDirectory("file.txt", "parent-id")).rejects.toMatchObject({ + provider: "googledrive", + conflict: true, + }); + }); + + it("writer should create createOnly files without generating a Google Drive id", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + const writer = await fs.create("file.txt", { createOnly: true }); + vi.spyOn(fs, "findFileInDirectory").mockResolvedValue(null); + vi.spyOn(fs, "findFilesInDirectory").mockResolvedValue([{ id: "created-file" }]); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValueOnce({ id: "created-file" }); + + await expect(writer.write("content")).resolves.toBeUndefined(); + + expect(requestSpy).toHaveBeenCalledTimes(1); + expect(requestSpy.mock.calls[0][0]).toBe( + "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&spaces=appDataFolder&fields=id" + ); + const createOptions = requestSpy.mock.calls[0][1] as RequestInit; + expect(createOptions.headers).toBeUndefined(); + const formData = createOptions.body as FormData; + expect(formData.get("metadata")).toBeTruthy(); + }); + + it("writer should rollback and reject createOnly when Google Drive creates a duplicate name", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + const writer = await fs.create("file.txt", { createOnly: true }); + vi.spyOn(fs, "findFileInDirectory").mockResolvedValue(null); + vi.spyOn(fs, "findFilesInDirectory").mockResolvedValue([{ id: "other-file" }, { id: "created-file" }]); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValueOnce({ id: "created-file" }).mockResolvedValueOnce({}); + + await expect(writer.write("content")).rejects.toMatchObject({ + provider: "googledrive", + conflict: true, + }); + + expect(requestSpy).toHaveBeenCalledTimes(2); + expect(requestSpy.mock.calls[1][0]).toBe( + "https://www.googleapis.com/drive/v3/files/created-file?spaces=appDataFolder" + ); + 
expect((requestSpy.mock.calls[1][1] as RequestInit).method).toBe("DELETE"); + }); + it("list should clear stale path cache and retry once on provider 404", async () => { const fs = new GoogleDriveFileSystem("/Base", "token"); const notFoundError = new FileSystemError({ @@ -351,4 +487,29 @@ describe("GoogleDriveFileSystem", () => { }); } }); + + it("list should expose opaque Google Drive version token", async () => { + const fs = new GoogleDriveFileSystem("/", "token"); + vi.spyOn(fs, "request").mockResolvedValue({ + files: [ + { + id: "file-1", + name: "test.user.js", + size: "12", + md5Checksum: "md5", + createdTime: "2024-01-01T00:00:00.000Z", + modifiedTime: "2024-01-02T00:00:00.000Z", + version: "7", + }, + ], + }); + + await expect(fs.list()).resolves.toMatchObject([ + { + name: "test.user.js", + digest: "md5", + version: "file-1:7", + }, + ]); + }); }); diff --git a/packages/filesystem/googledrive/googledrive.ts b/packages/filesystem/googledrive/googledrive.ts index b5934f20e..5d142e4a9 100644 --- a/packages/filesystem/googledrive/googledrive.ts +++ b/packages/filesystem/googledrive/googledrive.ts @@ -1,7 +1,7 @@ import { AuthVerify } from "../auth"; -import { FileSystemError, isNotFoundError } from "../error"; +import { FileSystemError, fileConflictError, isNotFoundError } from "../error"; import type FileSystem from "../filesystem"; -import type { FileInfo, FileCreateOptions, FileReader, FileWriter } from "../filesystem"; +import type { FileInfo, FileCreateOptions, FileDeleteOptions, FileReader, FileWriter } from "../filesystem"; import { joinPath } from "../utils"; import { GoogleDriveFileReader, GoogleDriveFileWriter } from "./rw"; @@ -32,8 +32,8 @@ export default class GoogleDriveFileSystem implements FileSystem { return Promise.resolve(new GoogleDriveFileSystem(joinPath(this.path, path), this.accessToken)); } - create(path: string, _opts?: FileCreateOptions): Promise { - return Promise.resolve(new GoogleDriveFileWriter(this, joinPath(this.path, path))); 
+ create(path: string, opts?: FileCreateOptions): Promise { + return Promise.resolve(new GoogleDriveFileWriter(this, joinPath(this.path, path), opts)); } async createDir(dir: string, _opts?: FileCreateOptions): Promise { if (!dir) { @@ -173,7 +173,10 @@ export default class GoogleDriveFileSystem implements FileSystem { if (nothen) { return doFetch().then(async (resp) => { if (resp.status === 401) { - return retryWithFreshToken(); + resp = await retryWithFreshToken(); + } + if (!resp.ok) { + throw await this.createResponseError(resp); } return resp; }); @@ -211,30 +214,51 @@ export default class GoogleDriveFileSystem implements FileSystem { return data; }); } - async delete(path: string): Promise { + async delete(path: string, opts?: FileDeleteOptions): Promise { const fullPath = joinPath(this.path, path); + const expected = parseGoogleDriveDeleteVersion(opts?.expectedVersion); // 首先,找到要删除的文件或文件夹 - const fileId = await this.getFileId(fullPath); + const fileId = expected?.fileId || (await this.getFileId(fullPath)); if (!fileId) { return; } + if (expected?.version || opts?.expectedDigest) { + // Google Drive delete 没有使用服务端 If-Match;这里先读 version/md5Checksum 再删除。 + // 这只能发现删除前已经过期的本地快照,不能消除检查后到删除前的并发更新窗口。 + // 典型残留窗口:A 读 version 通过后,B 更新文件,A 的 DELETE 仍可能删除 B 的新内容。 + const metadata = await this.request( + `https://www.googleapis.com/drive/v3/files/${fileId}?fields=version,md5Checksum&spaces=appDataFolder` + ); + const currentVersion = metadata?.version ? String(metadata.version) : undefined; + const currentDigest = metadata?.md5Checksum ? 
String(metadata.md5Checksum) : undefined; + if ( + (expected?.version && currentVersion !== expected.version) || + (opts?.expectedDigest && currentDigest !== opts.expectedDigest) + ) { + throw fileConflictError("googledrive", `Google Drive file changed before delete: ${fullPath}`, { + status: 412, + code: "versionMismatch", + }); + } + } // 删除文件或文件夹 - await this.request( - `https://www.googleapis.com/drive/v3/files/${fileId}?spaces=appDataFolder`, - { - method: "DELETE", - }, - true - ).then(async (resp) => { - if (resp.status === 404) { + try { + await this.request( + `https://www.googleapis.com/drive/v3/files/${fileId}?spaces=appDataFolder`, + { + method: "DELETE", + }, + true + ); + } catch (error) { + if (isNotFoundError(error)) { + this.clearRelatedCache(fullPath); return; } - if (resp.status !== 204 && resp.status !== 200) { - throw new Error(await resp.text()); - } - }); + throw error; + } // 清除相关缓存 this.clearRelatedCache(fullPath); @@ -322,7 +346,10 @@ export default class GoogleDriveFileSystem implements FileSystem { } const url = new URL("https://www.googleapis.com/drive/v3/files"); url.searchParams.set("q", query); - url.searchParams.set("fields", "files(id,name,mimeType,size,md5Checksum,createdTime,modifiedTime),nextPageToken"); + url.searchParams.set( + "fields", + "files(id,name,mimeType,size,md5Checksum,createdTime,modifiedTime,version),nextPageToken" + ); url.searchParams.set("spaces", "appDataFolder"); if (pageToken) { url.searchParams.set("pageToken", pageToken); @@ -337,6 +364,9 @@ export default class GoogleDriveFileSystem implements FileSystem { path: this.path, size: item.size ? parseInt(item.size, 10) : 0, digest: item.md5Checksum || "", + // 将 fileId 和 Drive version 编进 version,供写入/删除前做 best-effort 过期检查。 + // 这不是服务端原子 CAS;Google Drive 路径仍然只能降低风险,不能完全消除并发窗口。 + version: item.version ? 
`${item.id}:${item.version}` : item.id, createtime: new Date(item.createdTime).getTime(), updatetime: new Date(item.modifiedTime).getTime(), }); @@ -354,15 +384,23 @@ export default class GoogleDriveFileSystem implements FileSystem { // 辅助方法:在指定目录中查找文件 async findFileInDirectory(fileName: string, parentId: string): Promise { + const files = await this.findFilesInDirectory(fileName, parentId); + if (files.length > 1) { + throw fileConflictError("googledrive", `Duplicate Google Drive files found: ${fileName}`, { + status: 409, + code: "nameAlreadyExists", + }); + } + return files[0]?.id || null; + } + + async findFilesInDirectory(fileName: string, parentId: string): Promise> { const query = `name='${fileName}' and '${parentId}' in parents and trashed=false and mimeType!='application/vnd.google-apps.folder'`; const response = await this.request( `https://www.googleapis.com/drive/v3/files?q=${encodeURIComponent(query)}&fields=files(id)&spaces=appDataFolder` ); - if (response.files && response.files.length > 0) { - return response.files[0].id; - } - return null; + return response.files || []; } clearPathCache(path?: string): void { @@ -392,3 +430,15 @@ export default class GoogleDriveFileSystem implements FileSystem { return this.ensureDirPath(dirPath); } } + +function parseGoogleDriveDeleteVersion(version?: string): { fileId: string; version?: string } | undefined { + if (!version) return undefined; + const index = version.indexOf(":"); + if (index === -1) { + return { fileId: version }; + } + return { + fileId: version.substring(0, index), + version: version.substring(index + 1) || undefined, + }; +} diff --git a/packages/filesystem/googledrive/rw.ts b/packages/filesystem/googledrive/rw.ts index a19a6828d..e0de69797 100644 --- a/packages/filesystem/googledrive/rw.ts +++ b/packages/filesystem/googledrive/rw.ts @@ -1,5 +1,5 @@ -import { isNotFoundError } from "../error"; -import type { FileInfo, FileReader, FileWriter } from "../filesystem"; +import { fileConflictError, 
isNotFoundError } from "../error"; +import type { FileCreateOptions, FileInfo, FileReader, FileWriter } from "../filesystem"; import { joinPath } from "../utils"; import type GoogleDriveFileSystem from "./googledrive"; @@ -46,9 +46,12 @@ export class GoogleDriveFileWriter implements FileWriter { fs: GoogleDriveFileSystem; - constructor(fs: GoogleDriveFileSystem, path: string) { + opts?: FileCreateOptions; + + constructor(fs: GoogleDriveFileSystem, path: string, opts?: FileCreateOptions) { this.fs = fs; this.path = path; + this.opts = opts; } async write(content: string | Blob): Promise { @@ -73,18 +76,31 @@ export class GoogleDriveFileWriter implements FileWriter { const parentId = await this.fs.ensureDirExists(dirPath); // 使用优化的查找方法 - const existingFileId = await this.fs.findFileInDirectory(fileName, parentId); + const expected = parseGoogleDriveVersion(this.opts?.expectedVersion); + const existingFileId = expected?.fileId || (await this.fs.findFileInDirectory(fileName, parentId)); if (existingFileId) { + if (this.opts?.createOnly) { + throw fileConflictError("googledrive", `File already exists: ${this.path}`, { + status: 409, + code: "nameAlreadyExists", + }); + } // 如果文件存在,则更新 - return this.updateFile(existingFileId, content); + return this.updateFile(existingFileId, content, expected?.version); } else { // 如果文件不存在,则创建 return this.createNewFile(fileName, parentId, content); } } - private async updateFile(fileId: string, content: string | Blob): Promise { + private async updateFile(fileId: string, content: string | Blob, expectedVersion?: string): Promise { + if (expectedVersion) { + // Google Drive writer 没有原子 compare-and-swap 更新路径。 + // 这里的 preflight 只能在 PATCH 前发现本地快照已过期,不是服务端写入条件。 + // 残留窗口:A assertVersion 通过后,B 写入新内容,A 随后的 PATCH 仍可能覆盖 B。 + await this.assertVersion(fileId, expectedVersion); + } // 不设置Content-Type,让浏览器自动处理multipart/form-data边界 const metadata = { @@ -106,6 +122,19 @@ export class GoogleDriveFileWriter implements FileWriter { return 
Promise.resolve(); } + private async assertVersion(fileId: string, expectedVersion: string): Promise { + const metadata = await this.fs.request( + `https://www.googleapis.com/drive/v3/files/${fileId}?fields=version&spaces=appDataFolder` + ); + const currentVersion = metadata?.version ? String(metadata.version) : undefined; + if (currentVersion !== expectedVersion) { + throw fileConflictError("googledrive", `Google Drive file changed before write: ${this.path}`, { + status: 412, + code: "versionMismatch", + }); + } + } + private async createNewFile(fileName: string, parentId: string, content: string | Blob): Promise { // 不设置Content-Type,让浏览器自动处理multipart/form-data边界 @@ -118,14 +147,51 @@ export class GoogleDriveFileWriter implements FileWriter { formData.append("metadata", new Blob([JSON.stringify(metadata)], { type: "application/json" })); formData.append("file", content instanceof Blob ? content : new Blob([content])); - await this.fs.request( - `https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&spaces=appDataFolder`, + const created = await this.fs.request( + `https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&spaces=appDataFolder&fields=id`, { method: "POST", body: formData, } ); + if (this.opts?.createOnly) { + await this.rejectDuplicateCreate(fileName, parentId, created?.id); + } + return Promise.resolve(); } + + private async rejectDuplicateCreate(fileName: string, parentId: string, createdId?: string): Promise { + if (!createdId) { + return; + } + const files = await this.fs.findFilesInDirectory(fileName, parentId); + if (!files.length || (files.length === 1 && files[0].id === createdId)) { + return; + } + try { + await this.fs.request(`https://www.googleapis.com/drive/v3/files/${createdId}?spaces=appDataFolder`, { + method: "DELETE", + }); + } catch { + // best-effort 清理。即使清理失败,冲突仍会阻止本地 digest/status 推进。 + } + throw fileConflictError("googledrive", `Duplicate Google Drive file detected after create: ${this.path}`, { + 
status: 409, + code: "nameAlreadyExists", + }); + } +} + +function parseGoogleDriveVersion(version?: string): { fileId: string; version?: string } | undefined { + if (!version) return undefined; + const index = version.indexOf(":"); + if (index === -1) { + return { fileId: version }; + } + return { + fileId: version.substring(0, index), + version: version.substring(index + 1) || undefined, + }; } diff --git a/packages/filesystem/limiter.test.ts b/packages/filesystem/limiter.test.ts index f76ed56f4..79e66a891 100644 --- a/packages/filesystem/limiter.test.ts +++ b/packages/filesystem/limiter.test.ts @@ -140,6 +140,13 @@ describe("LimiterFileSystem", () => { expect(fs.delete).toHaveBeenCalledTimes(1); }); + it("should pass FileDeleteOptions through delete", async () => { + const fs = createFs(); + const limiter = new LimiterFileSystem(fs); + await limiter.delete(file.path, { expectedVersion: "etag-1" }); + expect(fs.delete).toHaveBeenCalledWith(file.path, { expectedVersion: "etag-1" }); + }); + it("should not retry createDir on 429", async () => { const fs = createFs(); vi.mocked(fs.createDir).mockRejectedValueOnce(new Error("429 Too Many Requests")); diff --git a/packages/filesystem/limiter.ts b/packages/filesystem/limiter.ts index 5dfb54b81..d8e0714ae 100644 --- a/packages/filesystem/limiter.ts +++ b/packages/filesystem/limiter.ts @@ -1,5 +1,5 @@ import type FileSystem from "./filesystem"; -import type { FileCreateOptions, FileInfo, FileReader, FileWriter } from "./filesystem"; +import type { FileCreateOptions, FileDeleteOptions, FileInfo, FileReader, FileWriter } from "./filesystem"; const RETRYABLE_429_OPS = new Set(["verify", "open", "read", "openDir", "list", "getDirUrl"]); @@ -127,8 +127,8 @@ export default class LimiterFileSystem implements FileSystem { return this.limiter.execute(() => this.fs.createDir(dir, opts), "createDir"); } - delete(path: string): Promise { - return this.limiter.execute(() => this.fs.delete(path), "delete"); + delete(path: string, 
opts?: FileDeleteOptions): Promise { + return this.limiter.execute(() => this.fs.delete(path, opts), "delete"); } list(): Promise { diff --git a/packages/filesystem/onedrive/onedrive.test.ts b/packages/filesystem/onedrive/onedrive.test.ts index d3c2afaba..ee3b7f97d 100644 --- a/packages/filesystem/onedrive/onedrive.test.ts +++ b/packages/filesystem/onedrive/onedrive.test.ts @@ -95,6 +95,17 @@ describe("OneDriveFileSystem", () => { await expect(fs.delete("missing.txt")).resolves.toBeUndefined(); }); + it("delete should send If-Match when expectedVersion is provided", async () => { + const fs = new OneDriveFileSystem("/", "token"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({} as unknown as Response); + + await expect(fs.delete("test.txt", { expectedVersion: '"etag-1"' })).resolves.toBeUndefined(); + + const config = requestSpy.mock.calls[0][1] as RequestInit; + expect(config.method).toBe("DELETE"); + expect((config.headers as Headers).get("If-Match")).toBe('"etag-1"'); + }); + it("createDir should create nested directories from root", async () => { const fs = new OneDriveFileSystem("/", "token"); const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({}); @@ -114,6 +125,21 @@ describe("OneDriveFileSystem", () => { }); }); + it("createDir should strip ScriptCat prefix when called with sync root", async () => { + const fs = new OneDriveFileSystem("/", "token"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({}); + + await expect(fs.createDir("ScriptCat/sync")).resolves.toBeUndefined(); + + expect(requestSpy).toHaveBeenCalledTimes(1); + expect(requestSpy.mock.calls[0][0]).toBe("https://graph.microsoft.com/v1.0/me/drive/special/approot/children"); + expect(JSON.parse((requestSpy.mock.calls[0][1] as RequestInit).body as string)).toMatchObject({ + name: "sync", + folder: {}, + "@microsoft.graph.conflictBehavior": "fail", + }); + }); + it("createDir should continue when an intermediate directory already exists", async () => { const 
fs = new OneDriveFileSystem("/", "token"); const requestSpy = vi @@ -298,6 +324,17 @@ describe("OneDriveFileSystem", () => { }); }); + it("writer should send If-Match on simple PUT when expectedVersion is provided", async () => { + const fs = new OneDriveFileSystem("/", "token"); + const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({}); + + const writer = await fs.create("empty.txt", { expectedVersion: "etag-1" }); + await writer.write(""); + + const headers = (requestSpy.mock.calls[0][1] as RequestInit).headers as Headers; + expect(headers.get("If-Match")).toBe("etag-1"); + }); + it("writer should upload empty Blob with simple PUT", async () => { const fs = new OneDriveFileSystem("/", "token"); const requestSpy = vi.spyOn(fs, "request").mockResolvedValue({}); @@ -331,4 +368,46 @@ describe("OneDriveFileSystem", () => { const headers = (requestSpy.mock.calls[1][1] as RequestInit).headers as Headers; expect(headers.get("Content-Range")).toBe("bytes 0-2/3"); }); + + it("writer should send If-None-Match and fail conflict behavior for createOnly upload session", async () => { + const fs = new OneDriveFileSystem("/", "token"); + const requestSpy = vi + .spyOn(fs, "request") + .mockResolvedValueOnce({ uploadUrl: "https://upload.example/session" }) + .mockResolvedValueOnce({}); + + const writer = await fs.create("not-empty.txt", { createOnly: true }); + await writer.write("abc"); + + const headers = (requestSpy.mock.calls[0][1] as RequestInit).headers as Headers; + expect(headers.get("If-None-Match")).toBe("*"); + expect(JSON.parse((requestSpy.mock.calls[0][1] as RequestInit).body as string)).toMatchObject({ + item: { + "@microsoft.graph.conflictBehavior": "fail", + }, + }); + }); + + it("list should expose eTag as version", async () => { + const fs = new OneDriveFileSystem("/", "token"); + vi.spyOn(fs, "request").mockResolvedValue({ + value: [ + { + name: "test.user.js", + size: 1, + eTag: "etag-1", + createdDateTime: "2024-01-01T00:00:00.000Z", + 
lastModifiedDateTime: "2024-01-02T00:00:00.000Z", + }, + ], + }); + + await expect(fs.list()).resolves.toMatchObject([ + { + name: "test.user.js", + digest: "etag-1", + version: "etag-1", + }, + ]); + }); }); diff --git a/packages/filesystem/onedrive/onedrive.ts b/packages/filesystem/onedrive/onedrive.ts index 3a2bef2b0..4e222b432 100644 --- a/packages/filesystem/onedrive/onedrive.ts +++ b/packages/filesystem/onedrive/onedrive.ts @@ -1,8 +1,8 @@ import { AuthVerify } from "../auth"; -import { FileSystemError } from "../error"; -import type { FileInfo, FileCreateOptions, FileReader, FileWriter } from "../filesystem"; +import { FileSystemError, isNotFoundError } from "../error"; +import type { FileInfo, FileCreateOptions, FileDeleteOptions, FileReader, FileWriter } from "../filesystem"; import type FileSystem from "../filesystem"; -import { joinPath } from "../utils"; +import { buildExpectedHeaders, joinPath } from "../utils"; import { OneDriveFileReader, OneDriveFileWriter } from "./rw"; export default class OneDriveFileSystem implements FileSystem { @@ -32,8 +32,8 @@ export default class OneDriveFileSystem implements FileSystem { return new OneDriveFileSystem(joinPath(this.path, path), this.accessToken); } - async create(path: string, _opts?: FileCreateOptions): Promise { - return new OneDriveFileWriter(this, joinPath(this.path, path)); + async create(path: string, opts?: FileCreateOptions): Promise { + return new OneDriveFileWriter(this, joinPath(this.path, path), opts); } async createDir(dir: string, _opts?: FileCreateOptions): Promise { @@ -144,7 +144,10 @@ export default class OneDriveFileSystem implements FileSystem { if (nothen) { return doFetch().then(async (resp) => { if (resp.status === 401 && !url.includes("uploadSession")) { - return retryWithFreshToken(); + resp = await retryWithFreshToken(); + } + if (!resp.ok) { + throw await this.createResponseError(resp); } return resp; }); @@ -182,19 +185,22 @@ export default class OneDriveFileSystem implements 
FileSystem { }); } - async delete(path: string): Promise { - const resp = await this.request( - `https://graph.microsoft.com/v1.0/me/drive/special/approot:${joinPath(this.path, path)}`, - { - method: "DELETE", - }, - true - ); - if (resp.status === 404) { - return; - } - if (resp.status !== 204) { - throw new Error(await resp.text()); + async delete(path: string, opts?: FileDeleteOptions): Promise { + try { + const expectedHeaders = buildExpectedHeaders(opts); + await this.request( + `https://graph.microsoft.com/v1.0/me/drive/special/approot:${joinPath(this.path, path)}`, + { + method: "DELETE", + ...(Object.keys(expectedHeaders).length ? { headers: new Headers(expectedHeaders) } : {}), + }, + true + ); + } catch (error) { + if (isNotFoundError(error)) { + return; + } + throw error; } } @@ -225,6 +231,7 @@ export default class OneDriveFileSystem implements FileSystem { path: this.path, size: val.size, digest: val.eTag, + version: val.eTag, createtime: new Date(val.createdDateTime).getTime(), updatetime: new Date(val.lastModifiedDateTime).getTime(), }); diff --git a/packages/filesystem/onedrive/rw.ts b/packages/filesystem/onedrive/rw.ts index b0d33d55a..47c08cdd4 100644 --- a/packages/filesystem/onedrive/rw.ts +++ b/packages/filesystem/onedrive/rw.ts @@ -1,6 +1,5 @@ -import { calculateMd5, md5OfText } from "@App/pkg/utils/crypto"; -import type { FileInfo, FileReader, FileWriter } from "../filesystem"; -import { joinPath } from "../utils"; +import type { FileCreateOptions, FileInfo, FileReader, FileWriter } from "../filesystem"; +import { buildConditionalHeaders, joinPath } from "../utils"; import type OneDriveFileSystem from "./onedrive"; export class OneDriveFileReader implements FileReader { @@ -37,9 +36,12 @@ export class OneDriveFileWriter implements FileWriter { fs: OneDriveFileSystem; - constructor(fs: OneDriveFileSystem, path: string) { + opts?: FileCreateOptions; + + constructor(fs: OneDriveFileSystem, path: string, opts?: FileCreateOptions) { this.fs = fs; 
this.path = path; + this.opts = opts; } size(content: string | Blob) { @@ -49,32 +51,28 @@ export class OneDriveFileWriter implements FileWriter { return new Blob([content]).size; } - async md5(content: string | Blob) { - if (content instanceof Blob) { - return calculateMd5(content); - } - return md5OfText(content); - } - async write(content: string | Blob): Promise { // 预上传获取id const size = this.size(content); if (size === 0) { + const headers = this.createConditionalHeaders(); return this.fs.request(`https://graph.microsoft.com/v1.0/me/drive/special/approot:${this.path}:/content`, { method: "PUT", body: content, + ...(headers ? { headers } : {}), }); } let myHeaders = new Headers(); myHeaders.append("Content-Type", "application/json"); + const conditionalHeaders = this.createConditionalHeaders(myHeaders); const uploadUrl = await this.fs .request(`https://graph.microsoft.com/v1.0/me/drive/special/approot:${this.path}:/createUploadSession`, { method: "POST", - headers: myHeaders, + headers: conditionalHeaders, body: JSON.stringify({ item: { - "@microsoft.graph.conflictBehavior": "replace", + "@microsoft.graph.conflictBehavior": this.opts?.createOnly ? "fail" : "replace", // description: "description", // fileSystemInfo: { // "@odata.type": "microsoft.graph.fileSystemInfo", @@ -97,4 +95,13 @@ export class OneDriveFileWriter implements FileWriter { headers: myHeaders, }); } + + private createConditionalHeaders(base?: Headers): Headers | undefined { + const headers = base || new Headers(); + const conditionalHeaders = buildConditionalHeaders(this.opts); + for (const [name, value] of Object.entries(conditionalHeaders)) { + headers.set(name, value); + } + return base || Object.keys(conditionalHeaders).length > 0 ? 
headers : undefined; + } } diff --git a/packages/filesystem/s3/rw.ts b/packages/filesystem/s3/rw.ts index 378fb18ad..7953463fe 100644 --- a/packages/filesystem/s3/rw.ts +++ b/packages/filesystem/s3/rw.ts @@ -1,5 +1,7 @@ -import type { S3Client } from "./client"; -import type { FileReader, FileWriter } from "../filesystem"; +import { S3Error, type S3Client } from "./client"; +import { fileConflictError } from "../error"; +import type { FileCreateOptions, FileReader, FileWriter } from "../filesystem"; +import { buildConditionalHeaders } from "../utils"; /** * S3 文件读取器 @@ -46,11 +48,14 @@ export class S3FileWriter implements FileWriter { modifiedDate?: number; - constructor(client: S3Client, bucket: string, key: string, modifiedDate?: number) { + opts?: FileCreateOptions; + + constructor(client: S3Client, bucket: string, key: string, opts?: FileCreateOptions) { this.client = client; this.bucket = bucket; this.key = key; - this.modifiedDate = modifiedDate; + this.modifiedDate = opts?.modifiedDate; + this.opts = opts; } /** @@ -68,10 +73,22 @@ export class S3FileWriter implements FileWriter { // 历史兼容:S3 侧使用 createtime 元数据保存文件时间,实际来源是 FileCreateOptions.modifiedDate。 headers["x-amz-meta-createtime"] = new Date(this.modifiedDate).toISOString(); } + Object.assign(headers, buildConditionalHeaders(this.opts)); - await this.client.request("PUT", this.bucket, this.key, { - body: typeof body === "string" ? body : body, - headers, - }); + try { + await this.client.request("PUT", this.bucket, this.key, { + body: typeof body === "string" ? 
body : body, + headers, + }); + } catch (error) { + if (error instanceof S3Error && (error.statusCode === 409 || error.statusCode === 412)) { + throw fileConflictError("s3", error.message, { + status: error.statusCode, + code: error.code, + raw: error, + }); + } + throw error; + } } } diff --git a/packages/filesystem/s3/s3.test.ts b/packages/filesystem/s3/s3.test.ts index 49a4e2940..6dfd2f6a3 100644 --- a/packages/filesystem/s3/s3.test.ts +++ b/packages/filesystem/s3/s3.test.ts @@ -25,13 +25,14 @@ function createMockResponse(options: { statusText?: string; text?: string; blob?: Blob; + headers?: Headers; }): Response { const { ok = true, status = 200, statusText = "OK", text = "" } = options; return { ok, status, statusText, - headers: new Headers(), + headers: options.headers || new Headers(), text: vi.fn().mockResolvedValue(text), blob: vi.fn().mockResolvedValue(options.blob ?? new Blob([text])), } as unknown as Response; @@ -202,6 +203,44 @@ describe("S3FileSystem", () => { }) ); }); + + it("S3FileWriter.write 应按 expectedVersion 设置 If-Match", async () => { + (mockClient.request as ReturnType).mockResolvedValue(createMockResponse({ ok: true })); + + const writer = await fs.create("output.txt", { + expectedVersion: "etag-1", + }); + await writer.write("hello world"); + + expect(mockClient.request).toHaveBeenCalledWith( + "PUT", + "test-bucket", + "output.txt", + expect.objectContaining({ + headers: expect.objectContaining({ + "If-Match": "etag-1", + }), + }) + ); + }); + + it("S3FileWriter.write 应按 createOnly 设置 If-None-Match", async () => { + (mockClient.request as ReturnType).mockResolvedValue(createMockResponse({ ok: true })); + + const writer = await fs.create("output.txt", { createOnly: true }); + await writer.write("hello world"); + + expect(mockClient.request).toHaveBeenCalledWith( + "PUT", + "test-bucket", + "output.txt", + expect.objectContaining({ + headers: expect.objectContaining({ + "If-None-Match": "*", + }), + }) + ); + }); }); // ---- createDir 
---- @@ -220,6 +259,27 @@ describe("S3FileSystem", () => { expect(mockClient.request).toHaveBeenCalledWith("DELETE", "test-bucket", "test.txt"); }); + it("应当在条件删除时发送 If-Match", async () => { + (mockClient.request as ReturnType).mockResolvedValue(createMockResponse({ ok: true, status: 204 })); + + await expect(fs.delete("test.txt", { expectedVersion: '"etag-1"' })).resolves.toBeUndefined(); + + expect(mockClient.request).toHaveBeenCalledWith("DELETE", "test-bucket", "test.txt", { + headers: { "If-Match": '"etag-1"' }, + }); + }); + + it("应当把条件删除失败转换为冲突错误", async () => { + (mockClient.request as ReturnType).mockRejectedValue( + new S3Error("PreconditionFailed", "Precondition Failed", 412) + ); + + await expect(fs.delete("test.txt", { expectedVersion: '"etag-1"' })).rejects.toMatchObject({ + provider: "s3", + conflict: true, + }); + }); + it("应当在 NoSuchKey 时静默成功(幂等删除)", async () => { (mockClient.request as ReturnType).mockRejectedValue( new S3Error("NoSuchKey", "The specified key does not exist", 404) @@ -267,6 +327,7 @@ describe("S3FileSystem", () => { path: "/", size: 1024, digest: "abc123", + version: '"abc123"', }); expect(files[1]).toMatchObject({ name: "file2.txt", @@ -276,6 +337,26 @@ describe("S3FileSystem", () => { }); }); + it("list 不应为每个对象额外 HEAD 读取 metadata createtime", async () => { + const xml = ` + + false + + file1.txt + 2024-01-02T00:00:00.000Z + "abc123" + 1024 + + `; + (mockClient.request as ReturnType).mockResolvedValueOnce(createMockResponse({ text: xml })); + + const files = await fs.list(); + + expect(files[0].createtime).toBe(new Date("2024-01-02T00:00:00.000Z").getTime()); + expect(files[0].updatetime).toBe(new Date("2024-01-02T00:00:00.000Z").getTime()); + expect(mockClient.request).toHaveBeenCalledTimes(1); + }); + it("应当正确处理带 basePath 的目录列表", async () => { const subFs = new S3FileSystem("test-bucket", mockClient, "/docs"); diff --git a/packages/filesystem/s3/s3.ts b/packages/filesystem/s3/s3.ts index 41ce89e6a..69552e325 100644 --- 
a/packages/filesystem/s3/s3.ts +++ b/packages/filesystem/s3/s3.ts @@ -2,10 +2,10 @@ import { XMLParser } from "fast-xml-parser"; import { S3Client, S3Error } from "./client"; import type { S3ClientConfig } from "./client"; import type FileSystem from "../filesystem"; -import type { FileInfo, FileCreateOptions, FileReader, FileWriter } from "../filesystem"; -import { joinPath } from "../utils"; +import type { FileInfo, FileCreateOptions, FileDeleteOptions, FileReader, FileWriter } from "../filesystem"; +import { buildExpectedHeaders, joinPath } from "../utils"; import { S3FileReader, S3FileWriter } from "./rw"; -import { WarpTokenError } from "../error"; +import { fileConflictError, WarpTokenError } from "../error"; // ---- ListObjectsV2 XML 解析 ---- @@ -167,7 +167,7 @@ export default class S3FileSystem implements FileSystem { * @returns 文件写入器 */ async create(path: string, opts?: FileCreateOptions): Promise { - return new S3FileWriter(this.client, this.bucket, joinPath(this.basePath, path).substring(1), opts?.modifiedDate); + return new S3FileWriter(this.client, this.bucket, joinPath(this.basePath, path).substring(1), opts); } /** @@ -182,10 +182,22 @@ export default class S3FileSystem implements FileSystem { * 此操作幂等——删除不存在的文件也会成功 * @param path 相对于当前 basePath 的文件路径 */ - async delete(path: string): Promise { + async delete(path: string, opts?: FileDeleteOptions): Promise { try { - await this.client.request("DELETE", this.bucket, joinPath(this.basePath, path).substring(1)); + const headers = buildExpectedHeaders(opts); + if (Object.keys(headers).length) { + await this.client.request("DELETE", this.bucket, joinPath(this.basePath, path).substring(1), { headers }); + } else { + await this.client.request("DELETE", this.bucket, joinPath(this.basePath, path).substring(1)); + } } catch (error: any) { + if (error instanceof S3Error && (error.statusCode === 409 || error.statusCode === 412)) { + throw fileConflictError("s3", error.message, { + status: error.statusCode, + code: 
error.code, + raw: error, + }); + } // S3 delete 是幂等的,key 不存在时也视为成功 if (error instanceof S3Error && error.code === "NoSuchKey") { return; @@ -237,6 +249,7 @@ export default class S3FileSystem implements FileSystem { path: this.basePath, size: obj.size || 0, digest: obj.etag?.replace(/"/g, "") || "", + version: obj.etag || "", createtime: lastModified, updatetime: lastModified, }); diff --git a/packages/filesystem/utils.test.ts b/packages/filesystem/utils.test.ts new file mode 100644 index 000000000..965298033 --- /dev/null +++ b/packages/filesystem/utils.test.ts @@ -0,0 +1,36 @@ +import { describe, expect, it } from "vitest"; +import { buildConditionalHeaders, buildExpectedHeaders } from "./utils"; + +describe("filesystem utils", () => { + it("buildConditionalHeaders should prefer createOnly over expected tokens", () => { + expect(buildConditionalHeaders({ createOnly: true, expectedVersion: "etag-1" })).toEqual({ + "If-None-Match": "*", + }); + }); + + it("buildConditionalHeaders should use expectedVersion before expectedDigest", () => { + expect(buildConditionalHeaders({ expectedVersion: "version-1", expectedDigest: "digest-1" })).toEqual({ + "If-Match": "version-1", + }); + }); + + it("buildConditionalHeaders should use expectedDigest when version is absent", () => { + expect(buildConditionalHeaders({ expectedDigest: "digest-1" })).toEqual({ + "If-Match": "digest-1", + }); + }); + + it("buildConditionalHeaders should return no headers without conditions", () => { + expect(buildConditionalHeaders()).toEqual({}); + }); + + it("buildExpectedHeaders should use expectedVersion before expectedDigest", () => { + expect(buildExpectedHeaders({ expectedVersion: "version-1", expectedDigest: "digest-1" })).toEqual({ + "If-Match": "version-1", + }); + }); + + it("buildExpectedHeaders should return no headers without expected tokens", () => { + expect(buildExpectedHeaders()).toEqual({}); + }); +}); diff --git a/packages/filesystem/utils.ts b/packages/filesystem/utils.ts index 
4df95c7aa..12bc778bd 100644 --- a/packages/filesystem/utils.ts +++ b/packages/filesystem/utils.ts @@ -1,3 +1,5 @@ +import type { FileCreateOptions, FileDeleteOptions } from "./filesystem"; + export function joinPath(...paths: string[]): string { let path = ""; for (let value of paths) { @@ -14,3 +16,15 @@ export function joinPath(...paths: string[]): string { } return path; } + +export function buildConditionalHeaders(opts?: FileCreateOptions): Record { + if (opts?.createOnly) { + return { "If-None-Match": "*" }; + } + return buildExpectedHeaders(opts); +} + +export function buildExpectedHeaders(opts?: FileDeleteOptions): Record { + const expected = opts?.expectedVersion || opts?.expectedDigest; + return expected ? { "If-Match": expected } : {}; +} diff --git a/packages/filesystem/webdav/rw.ts b/packages/filesystem/webdav/rw.ts index c13afe2f4..9d0f6d417 100644 --- a/packages/filesystem/webdav/rw.ts +++ b/packages/filesystem/webdav/rw.ts @@ -1,5 +1,7 @@ import type { WebDAVClient } from "webdav"; -import type { FileReader, FileWriter } from "../filesystem"; +import { fileConflictError } from "../error"; +import type { FileCreateOptions, FileReader, FileWriter } from "../filesystem"; +import { buildConditionalHeaders } from "../utils"; export class WebDAVFileReader implements FileReader { client: WebDAVClient; @@ -32,14 +34,34 @@ export class WebDAVFileWriter implements FileWriter { path: string; - constructor(client: WebDAVClient, path: string) { + opts?: FileCreateOptions; + + constructor(client: WebDAVClient, path: string, opts?: FileCreateOptions) { this.client = client; this.path = path; + this.opts = opts; } async write(content: string | Blob): Promise { const data = content instanceof Blob ? await content.arrayBuffer() : content; - const resp = await this.client.putFileContents(this.path, data); + const headers = buildConditionalHeaders(this.opts); + delete headers["If-None-Match"]; + const options = { + ...(Object.keys(headers).length ? 
{ headers } : {}), + ...(this.opts?.createOnly ? { overwrite: false } : {}), + }; + let resp; + try { + resp = await this.client.putFileContents(this.path, data, options); + } catch (error: any) { + if (error.response?.status === 409 || error.response?.status === 412) { + throw fileConflictError("webdav", error.message || "WebDAV conditional write failed", { + status: error.response.status, + raw: error, + }); + } + throw error; + } if (!resp) { throw new Error("write error"); } diff --git a/packages/filesystem/webdav/webdav.test.ts b/packages/filesystem/webdav/webdav.test.ts index a0098baa9..2920897e5 100644 --- a/packages/filesystem/webdav/webdav.test.ts +++ b/packages/filesystem/webdav/webdav.test.ts @@ -206,6 +206,29 @@ describe("WebDAVFileSystem", () => { expect(mockClient.deleteFile).toHaveBeenCalledWith("/test.txt"); }); + it("应当在条件删除时发送 If-Match", async () => { + const fs = createTestFS(mockClient); + + await fs.delete("test.txt", { expectedVersion: '"etag-1"' }); + + expect(mockClient.deleteFile).toHaveBeenCalledWith("/test.txt", { + headers: { "If-Match": '"etag-1"' }, + }); + }); + + it("应当把条件删除失败转换为冲突错误", async () => { + (mockClient.deleteFile as ReturnType).mockRejectedValue({ + response: { status: 412 }, + message: "Precondition Failed", + }); + const fs = createTestFS(mockClient); + + await expect(fs.delete("test.txt", { expectedVersion: '"etag-1"' })).rejects.toMatchObject({ + provider: "webdav", + conflict: true, + }); + }); + it("应当在 404 时静默成功(幂等删除)", async () => { (mockClient.deleteFile as ReturnType).mockRejectedValue({ response: { status: 404 }, @@ -244,6 +267,7 @@ describe("WebDAVFileSystem", () => { name: "test.txt", path: "/", digest: '"abc"', + version: '"abc"', size: 1024, }); }); @@ -268,6 +292,32 @@ describe("WebDAVFileSystem", () => { }); }); + describe("conditional write", () => { + it("应当按 expectedVersion 传入 If-Match", async () => { + const fs = createTestFS(mockClient); + + const writer = await fs.create("test.txt", { 
expectedVersion: '"etag-1"' }); + await writer.write("content"); + + expect(mockClient.putFileContents).toHaveBeenCalledWith("/test.txt", "content", { + headers: { + "If-Match": '"etag-1"', + }, + }); + }); + + it("应当按 createOnly 传入 overwrite=false", async () => { + const fs = createTestFS(mockClient); + + const writer = await fs.create("test.txt", { createOnly: true }); + await writer.write("content"); + + expect(mockClient.putFileContents).toHaveBeenCalledWith("/test.txt", "content", { + overwrite: false, + }); + }); + }); + describe("getDirUrl", () => { it("应当返回 url + basePath", async () => { const fs = createTestFS(mockClient); diff --git a/packages/filesystem/webdav/webdav.ts b/packages/filesystem/webdav/webdav.ts index 60b1be77a..3160b7501 100644 --- a/packages/filesystem/webdav/webdav.ts +++ b/packages/filesystem/webdav/webdav.ts @@ -1,10 +1,10 @@ import type { FileStat, WebDAVClient, WebDAVClientOptions } from "webdav"; import { createClient, getPatcher } from "webdav"; import type FileSystem from "../filesystem"; -import type { FileInfo, FileCreateOptions, FileReader, FileWriter } from "../filesystem"; -import { joinPath } from "../utils"; +import type { FileInfo, FileCreateOptions, FileDeleteOptions, FileReader, FileWriter } from "../filesystem"; +import { buildExpectedHeaders, joinPath } from "../utils"; import { WebDAVFileReader, WebDAVFileWriter } from "./rw"; -import { WarpTokenError } from "../error"; +import { fileConflictError, WarpTokenError } from "../error"; // 禁止 WebDAV 请求携带浏览器 cookies,只通过账号密码认证 (#1297) // 全局单次注册 @@ -98,8 +98,8 @@ export default class WebDAVFileSystem implements FileSystem { return WebDAVFileSystem.fromSameClient(this, joinPath(this.basePath, path)); } - async create(path: string, _opts?: FileCreateOptions): Promise { - return new WebDAVFileWriter(this.client, joinPath(this.basePath, path)); + async create(path: string, opts?: FileCreateOptions): Promise { + return new WebDAVFileWriter(this.client, joinPath(this.basePath, 
path), opts); } async createDir(path: string, _opts?: FileCreateOptions): Promise { @@ -114,10 +114,21 @@ export default class WebDAVFileSystem implements FileSystem { } } - async delete(path: string): Promise { + async delete(path: string, opts?: FileDeleteOptions): Promise { try { - await this.client.deleteFile(joinPath(this.basePath, path)); + const headers = buildExpectedHeaders(opts); + if (Object.keys(headers).length) { + await this.client.deleteFile(joinPath(this.basePath, path), { headers }); + } else { + await this.client.deleteFile(joinPath(this.basePath, path)); + } } catch (e: any) { + if (e.response?.status === 409 || e.response?.status === 412) { + throw fileConflictError("webdav", e.message || "WebDAV conditional delete failed", { + status: e.response.status, + raw: e, + }); + } if (e.response?.status === 404 || e.message?.includes("404")) { return; } @@ -143,6 +154,7 @@ export default class WebDAVFileSystem implements FileSystem { name: item.basename, path: this.basePath, digest: item.etag || "", + version: item.etag || "", size: item.size, createtime: time, updatetime: time, diff --git a/src/app/service/service_worker/index.ts b/src/app/service/service_worker/index.ts index 51f381283..ba496d0ca 100644 --- a/src/app/service/service_worker/index.ts +++ b/src/app/service/service_worker/index.ts @@ -142,11 +142,10 @@ export default class ServiceWorkerManager { break; case "cloudSync": // 进行一次云同步 - systemConfig.getCloudSync().then((config) => { - synchronize.buildFileSystem(config).then((fs) => { - synchronize.syncOnce(config, fs); - }); - }); + systemConfig + .getCloudSync() + .then((config) => synchronize.buildFileSystem(config).then((fs) => synchronize.syncOnce(config, fs))) + .catch((e) => console.error("cloudSync alarm error", e)); break; case "checkUpdate": // 检查扩展更新 diff --git a/src/app/service/service_worker/synchronize.test.ts b/src/app/service/service_worker/synchronize.test.ts index 4fc2ba742..86ab411f2 100644 --- 
a/src/app/service/service_worker/synchronize.test.ts +++ b/src/app/service/service_worker/synchronize.test.ts @@ -2,6 +2,7 @@ import { describe, it, expect, vi, beforeEach } from "vitest"; import { SynchronizeService } from "./synchronize"; import { initTestEnv } from "@Tests/utils"; import type FileSystem from "@Packages/filesystem/filesystem"; +import { FileSystemError } from "@Packages/filesystem/error"; import type { CloudSyncConfig } from "@App/pkg/config/config"; import { stackAsyncTask } from "@App/pkg/utils/async_queue"; import { md5OfText } from "@App/pkg/utils/crypto"; @@ -44,6 +45,41 @@ describe("SynchronizeService", () => { chrome.storage.local.clear(); }); + it("fails selected backup export when any requested script is missing", async () => { + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + { + getScriptValueDetails: vi.fn().mockResolvedValue([{}, undefined]), + } as any, + { + getResourceByType: vi.fn().mockResolvedValue({}), + } as any, + {} as any, + {} as any, + { + get: vi.fn().mockResolvedValueOnce(undefined).mockResolvedValueOnce({ + uuid: "existing", + name: "Existing", + downloadUrl: "https://example.com/existing.user.js", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }), + scriptCodeDAO: { + get: vi.fn().mockResolvedValue({ code: "// code" }), + }, + } as any + ); + + await expect(service.getScriptBackupData(["missing", "existing"])).rejects.toThrow( + "Failed to export 1 selected script(s)" + ); + }); + it("serializes concurrent syncOnce calls", async () => { let releaseFirst!: () => void; const firstGate = new Promise((resolve) => { @@ -331,6 +367,280 @@ console.log("ok");` expect(order).toEqual(["delete:start", "delete:end", "digest:list"]); }); + it("honors tombstone even when cloud script still exists and script digest is unchanged", async () => { + const deleteScript = vi.fn().mockResolvedValue(undefined); + const fs = createFs({ + list: vi + .fn() + 
.mockResolvedValueOnce([ + { + name: "del-uuid.user.js", + path: "/", + size: 1, + digest: "script-digest", + version: "script-version", + createtime: 1, + updatetime: 1, + }, + { + name: "del-uuid.meta.json", + path: "/", + size: 1, + digest: "new-meta-digest", + version: "meta-version", + createtime: 1, + updatetime: 2, + }, + ]) + .mockResolvedValueOnce([ + { + name: "del-uuid.meta.json", + path: "/", + size: 1, + digest: "new-meta-digest", + version: "meta-version", + createtime: 1, + updatetime: 2, + }, + ]), + open: vi.fn().mockResolvedValue({ + read: vi.fn().mockResolvedValue(JSON.stringify({ uuid: "del-uuid", isDeleted: true })), + }), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + { deleteScript } as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([ + { + uuid: "del-uuid", + name: "del", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }, + ]), + } as any + ); + await (service as any).storage.set("file_digest", { + "del-uuid.user.js": "script-digest", + "del-uuid.meta.json": "old-meta-digest", + }); + + await service.syncOnce(syncConfig, fs); + + expect(deleteScript).toHaveBeenCalledWith("del-uuid", "sync"); + expect(fs.delete).toHaveBeenCalledWith("del-uuid.user.js", { + expectedVersion: "script-version", + }); + await expect((service as any).storage.get("tombstone_digest")).resolves.toEqual({ + "del-uuid.meta.json": "new-meta-digest", + }); + }); + + it("honors cached tombstone meta when cloud script still exists", async () => { + const deleteScript = vi.fn().mockResolvedValue(undefined); + const fs = createFs({ + list: vi + .fn() + .mockResolvedValueOnce([ + { + name: "del-uuid.user.js", + path: "/", + size: 1, + digest: "script-digest", + version: "script-version", + createtime: 1, + updatetime: 1, + }, + { + name: "del-uuid.meta.json", + path: "/", + size: 1, + digest: "tombstone-meta-digest", + version: "meta-version", + 
createtime: 1, + updatetime: 1, + }, + ]) + .mockResolvedValueOnce([]), + open: vi.fn().mockResolvedValue({ + read: vi.fn().mockResolvedValue(JSON.stringify({ uuid: "del-uuid", isDeleted: true })), + }), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + { deleteScript } as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([ + { + uuid: "del-uuid", + name: "del", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }, + ]), + } as any + ); + await (service as any).storage.set("file_digest", { + "del-uuid.user.js": "script-digest", + "del-uuid.meta.json": "tombstone-meta-digest", + }); + await (service as any).storage.set("tombstone_digest", { + "del-uuid.meta.json": "tombstone-meta-digest", + }); + + await service.syncOnce(syncConfig, fs); + + expect(deleteScript).toHaveBeenCalledWith("del-uuid", "sync"); + expect(fs.delete).toHaveBeenCalledWith("del-uuid.user.js", { + expectedVersion: "script-version", + }); + }); + + it("reuses meta read when tombstone precheck falls through to pull", async () => { + const installScript = vi.fn().mockResolvedValue(undefined); + const metaFile = { + name: "pull-uuid.meta.json", + path: "/", + size: 1, + digest: "new-meta-digest", + version: "meta-version", + createtime: 1, + updatetime: 2, + }; + const scriptFile = { + name: "pull-uuid.user.js", + path: "/", + size: 1, + digest: "new-script-digest", + version: "script-version", + createtime: 1, + updatetime: 2, + }; + const openMock = vi.fn().mockImplementation(async (file) => ({ + read: vi.fn().mockResolvedValue( + file.name.endsWith(".meta.json") + ? 
JSON.stringify({ uuid: "pull-uuid", origin: "origin" }) + : `// ==UserScript== +// @name Pull Test +// @namespace sync-test +// @match https://example.com/* +// ==/UserScript== +console.log("ok");` + ), + })); + const fs = createFs({ + list: vi.fn().mockResolvedValueOnce([scriptFile, metaFile]).mockResolvedValueOnce([scriptFile, metaFile]), + open: openMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + { installScript } as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([ + { + uuid: "pull-uuid", + name: "local", + downloadUrl: "", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }, + ]), + } as any + ); + await (service as any).storage.set("file_digest", { + "pull-uuid.user.js": "old-script-digest", + "pull-uuid.meta.json": "old-meta-digest", + }); + + await service.syncOnce(syncConfig, fs); + + expect(installScript).toHaveBeenCalledTimes(1); + expect(openMock.mock.calls.filter(([file]) => file.name.endsWith(".meta.json"))).toHaveLength(1); + expect(openMock.mock.calls.filter(([file]) => file.name.endsWith(".user.js"))).toHaveLength(1); + }); + + it("does not install cloud script when meta is a tombstone", async () => { + const installScript = vi.fn().mockResolvedValue(undefined); + const fs = createFs({ + list: vi + .fn() + .mockResolvedValueOnce([ + { + name: "del-uuid.user.js", + path: "/", + size: 1, + digest: "script-digest", + version: "script-version", + createtime: 1, + updatetime: 1, + }, + { + name: "del-uuid.meta.json", + path: "/", + size: 1, + digest: "meta-digest", + version: "meta-version", + createtime: 1, + updatetime: 2, + }, + ]) + .mockResolvedValueOnce([]), + open: vi.fn().mockResolvedValueOnce({ + read: vi.fn().mockResolvedValue(JSON.stringify({ uuid: "del-uuid", isDeleted: true })), + }), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + { installScript } as any, + {} as any, + {} as any, + {} 
as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + await service.syncOnce(syncConfig, fs); + + expect(installScript).not.toHaveBeenCalled(); + expect(fs.open).toHaveBeenCalledTimes(1); + expect(fs.open).toHaveBeenCalledWith(expect.objectContaining({ name: "del-uuid.meta.json" })); + expect(fs.delete).toHaveBeenCalledWith("del-uuid.user.js", { + expectedVersion: "script-version", + }); + }); + it("waits for pushScript before updating file digest", async () => { let releasePush!: () => void; const pushGate = new Promise((resolve) => { @@ -349,7 +659,7 @@ console.log("ok");` const fsList = vi .fn() .mockImplementationOnce(async () => []) - .mockImplementationOnce(async () => { + .mockImplementation(async () => { order.push("digest:list"); return []; }); @@ -413,7 +723,7 @@ console.log("ok");` metadata: {}, }; const fs = createFs({ - list: vi.fn().mockResolvedValueOnce([]).mockResolvedValueOnce([]), + list: vi.fn().mockResolvedValueOnce([]).mockResolvedValueOnce([]).mockResolvedValueOnce([]).mockResolvedValue([]), }); const service = new SynchronizeService( {} as any, @@ -450,6 +760,14 @@ console.log("ok");` const createMock = vi.fn().mockResolvedValue({ write: writeMock }); const fs = createFs({ create: createMock, + open: vi + .fn() + .mockResolvedValueOnce({ + read: vi.fn().mockResolvedValue("// old code"), + }) + .mockResolvedValueOnce({ + read: vi.fn().mockResolvedValue('{"uuid":"push-uuid"}'), + }), }); const service = new SynchronizeService( {} as any, @@ -481,17 +799,24 @@ console.log("ok");` await service.pushScript(fs, script as any); - expect(createMock.mock.calls[0]).toEqual(["push-uuid.user.js", { modifiedDate: 1234 }]); - expect(createMock.mock.calls[1]).toEqual(["push-uuid.meta.json", { modifiedDate: 1234 }]); + expect(createMock.mock.calls[0]).toEqual(["push-uuid.user.js", { modifiedDate: 1234, createOnly: true }]); + expect(createMock.mock.calls[1]).toEqual(["push-uuid.meta.json", { modifiedDate: 
1234, createOnly: true }]); }); - it("uses Date.now as modifiedDate when writing scriptcat-sync.json", async () => { - const nowSpy = vi.spyOn(Date, "now").mockReturnValue(9876); + it("passes remote versions when pushing existing script and meta files", async () => { const createMock = vi.fn().mockResolvedValue({ write: vi.fn().mockResolvedValue(undefined), }); const fs = createFs({ create: createMock, + open: vi + .fn() + .mockResolvedValueOnce({ + read: vi.fn().mockResolvedValue("// old code"), + }) + .mockResolvedValueOnce({ + read: vi.fn().mockResolvedValue('{"uuid":"push-uuid"}'), + }), }); const service = new SynchronizeService( {} as any, @@ -502,29 +827,67 @@ console.log("ok");` {} as any, {} as any, { - scriptCodeDAO: {}, + scriptCodeDAO: { + get: vi.fn().mockResolvedValue({ code: "// code" }), + }, all: vi.fn().mockResolvedValue([]), } as any ); + const script = { + uuid: "push-uuid", + name: "push", + origin: "origin", + downloadUrl: "download-url", + checkUpdateUrl: "check-update-url", + updatetime: 1234, + createtime: 1000, + status: 1, + sort: 0, + metadata: {}, + }; - try { - await service.syncOnce(syncConfig, fs); + await service.pushScript(fs, script as any, { + script: { + name: "push-uuid.user.js", + path: "/", + size: 1, + digest: "digest-js", + version: "version-js", + createtime: 1, + updatetime: 1, + }, + meta: { + name: "push-uuid.meta.json", + path: "/", + size: 1, + digest: "digest-meta", + version: "version-meta", + createtime: 1, + updatetime: 1, + }, + }); - expect(createMock).toHaveBeenCalledWith("scriptcat-sync.json", { - modifiedDate: 9876, - }); - } finally { - nowSpy.mockRestore(); - } + expect(createMock.mock.calls[0]).toEqual([ + "push-uuid.user.js", + { modifiedDate: 1234, expectedVersion: "version-js" }, + ]); + expect(createMock.mock.calls[1]).toEqual([ + "push-uuid.meta.json", + { modifiedDate: 1234, expectedVersion: "version-meta" }, + ]); }); - it("uses Date.now as modifiedDate when writing delete tombstone meta", async () 
=> { - const nowSpy = vi.spyOn(Date, "now").mockReturnValue(6789); - const createMock = vi.fn().mockResolvedValue({ - write: vi.fn().mockResolvedValue(undefined), - }); + it("cleans up newly created script file with a digest guard when meta write fails", async () => { + const scriptWriter = { write: vi.fn().mockResolvedValue(undefined) }; + const metaWriter = { + write: vi.fn().mockRejectedValue(new Error("meta write failed")), + }; + const createMock = vi.fn().mockResolvedValueOnce(scriptWriter).mockResolvedValueOnce(metaWriter); + const listMock = vi.fn().mockResolvedValue([]); const fs = createFs({ create: createMock, + delete: vi.fn().mockResolvedValue(undefined), + list: listMock, }); const service = new SynchronizeService( {} as any, @@ -535,45 +898,658 @@ console.log("ok");` {} as any, {} as any, { - scriptCodeDAO: {}, + scriptCodeDAO: { + get: vi.fn().mockResolvedValue({ code: "// code" }), + }, all: vi.fn().mockResolvedValue([]), } as any ); + const script = { + uuid: "push-uuid", + name: "push", + origin: "origin", + downloadUrl: "download-url", + checkUpdateUrl: "check-update-url", + updatetime: 1234, + createtime: 1000, + status: 1, + sort: 0, + metadata: {}, + }; - try { - await service.deleteCloudScript(fs, "delete-uuid", true); + await expect(service.pushScript(fs, script as any)).rejects.toThrow("meta write failed"); - expect(createMock).toHaveBeenCalledWith("delete-uuid.meta.json", { - modifiedDate: 6789, + expect(fs.delete).toHaveBeenCalledWith("push-uuid.user.js", { + expectedDigest: md5OfText("// code"), + }); + expect(listMock).not.toHaveBeenCalled(); + }); + + it("does not read or restore previous content when existing meta write fails", async () => { + const scriptWriter = { write: vi.fn().mockResolvedValue(undefined) }; + const metaWriter = { + write: vi.fn().mockRejectedValue(new Error("meta write failed")), + }; + const createMock = vi.fn().mockResolvedValueOnce(scriptWriter).mockResolvedValueOnce(metaWriter); + const oldScriptFile = { + 
name: "push-uuid.user.js", + path: "/", + size: 1, + digest: "old-digest-js", + version: "old-version-js", + createtime: 1, + updatetime: 1000, + }; + const oldMetaFile = { + name: "push-uuid.meta.json", + path: "/", + size: 1, + digest: "old-digest-meta", + version: "old-version-meta", + createtime: 1, + updatetime: 1000, + }; + const openMock = vi.fn(); + const listMock = vi.fn().mockResolvedValue([]); + const fs = createFs({ + open: openMock, + list: listMock, + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: { + get: vi.fn().mockResolvedValue({ code: "// new code" }), + }, + all: vi.fn().mockResolvedValue([]), + } as any + ); + const script = { + uuid: "push-uuid", + name: "push", + origin: "origin", + downloadUrl: "download-url", + checkUpdateUrl: "check-update-url", + updatetime: 1234, + createtime: 1000, + status: 1, + sort: 0, + metadata: {}, + }; + + await expect( + service.pushScript(fs, script as any, { + script: oldScriptFile, + meta: oldMetaFile, + }) + ).rejects.toThrow("meta write failed"); + + expect(openMock).not.toHaveBeenCalled(); + expect(listMock).not.toHaveBeenCalled(); + expect(createMock).toHaveBeenCalledTimes(2); + expect(fs.delete).not.toHaveBeenCalled(); + }); + + it("uses Date.now as modifiedDate when writing scriptcat-sync.json", async () => { + const nowSpy = vi.spyOn(Date, "now").mockReturnValue(9876); + const createMock = vi.fn().mockResolvedValue({ + write: vi.fn().mockResolvedValue(undefined), + }); + const fs = createFs({ + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + try { + await service.syncOnce(syncConfig, fs); + + expect(createMock).toHaveBeenCalledWith("scriptcat-sync.json", { + modifiedDate: 9876, + 
createOnly: true, }); } finally { nowSpy.mockRestore(); } }); - it("preserves cloud-native digest and does not overwrite with pushed md5", async () => { - // 各后端 digest 格式不一致(webdav/onedrive 是 etag、dropbox 是 content_hash 等), - // 上传后再次 list 已经能拿到原生 digest 时,必须保留它,不能被本地 md5 覆盖, - // 否则下次同步比对会因格式不一致而把未变动的脚本判定为已变动并触发不必要的拉取/推送 - const scriptCode = "// code"; - const script = { - uuid: "push-uuid", - name: "push", - origin: "origin", - downloadUrl: "download-url", - checkUpdateUrl: "check-update-url", - updatetime: 1, - createtime: 1, - status: 1, - sort: 0, - metadata: {}, - }; - const cloudListAfterPush = [ - { name: "push-uuid.user.js", digest: "etag-user-js", updatetime: 1 }, - { name: "push-uuid.meta.json", digest: "etag-meta-json", updatetime: 1 }, - ]; + it("uses existing scriptcat-sync.json version as write precondition", async () => { + const nowSpy = vi.spyOn(Date, "now").mockReturnValue(9876); + const createMock = vi.fn().mockResolvedValue({ + write: vi.fn().mockResolvedValue(undefined), + }); + const fs = createFs({ + list: vi + .fn() + .mockResolvedValueOnce([ + { + name: "scriptcat-sync.json", + path: "/", + size: 1, + digest: "digest-sync", + version: "version-sync", + createtime: 1, + updatetime: 1, + }, + ]) + .mockResolvedValueOnce([]), + open: vi.fn().mockResolvedValue({ + read: vi.fn().mockResolvedValue(JSON.stringify({ version: "1.0.0", status: { scripts: {} } })), + }), + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + try { + await service.syncOnce(syncConfig, fs); + + expect(createMock).toHaveBeenCalledWith("scriptcat-sync.json", { + modifiedDate: 9876, + expectedVersion: "version-sync", + }); + } finally { + nowSpy.mockRestore(); + } + }); + + it("preserves unknown scriptcat-sync.json fields when writing status", async () => { + const writeMock = 
vi.fn().mockResolvedValue(undefined); + const fs = createFs({ + list: vi + .fn() + .mockResolvedValueOnce([ + { + name: "scriptcat-sync.json", + path: "/", + size: 1, + digest: "digest-sync", + version: "version-sync", + createtime: 1, + updatetime: 1, + }, + ]) + .mockResolvedValueOnce([]), + open: vi.fn().mockResolvedValue({ + read: vi.fn().mockResolvedValue( + JSON.stringify({ + version: "old-version", + schemaVersion: 2, + status: { scripts: {} }, + }) + ), + }), + create: vi.fn().mockResolvedValue({ + write: writeMock, + }), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + await service.syncOnce(syncConfig, fs); + + const written = JSON.parse(writeMock.mock.calls[0][0]); + expect(written.schemaVersion).toBe(2); + expect(written.version).not.toBe("old-version"); + expect(written.status).toEqual({ scripts: {} }); + }); + + it("notifies and skips digest update when scriptcat-sync.json hits remote conflict", async () => { + const conflict = new FileSystemError({ + provider: "webdav", + message: "Precondition failed", + status: 412, + conflict: true, + }); + const createMock = vi.fn().mockResolvedValue({ + write: vi.fn().mockRejectedValue(conflict), + }); + const fs = createFs({ + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + const notifySpy = vi.spyOn(service, "notifySyncFailed").mockImplementation(() => {}); + const updateDigestSpy = vi.spyOn(service, "updateFileDigest"); + + await service.syncOnce(syncConfig, fs); + + expect(createMock).toHaveBeenCalledWith("scriptcat-sync.json", expect.anything()); + expect(updateDigestSpy).not.toHaveBeenCalled(); + expect(notifySpy).toHaveBeenCalledWith(true, 1); + 
}); + + it("notifies and skips digest update when scriptcat-sync.json write fails", async () => { + const createMock = vi.fn().mockResolvedValue({ + write: vi.fn().mockRejectedValue(new Error("sync status write failed")), + }); + const fs = createFs({ + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + const notifySpy = vi.spyOn(service, "notifySyncFailed").mockImplementation(() => {}); + const updateDigestSpy = vi.spyOn(service, "updateFileDigest"); + + await service.syncOnce(syncConfig, fs); + + expect(createMock).toHaveBeenCalledWith("scriptcat-sync.json", expect.anything()); + expect(updateDigestSpy).not.toHaveBeenCalled(); + expect(notifySpy).toHaveBeenCalledWith(false, 1); + }); + + it("uses Date.now as modifiedDate when writing delete tombstone meta", async () => { + const nowSpy = vi.spyOn(Date, "now").mockReturnValue(6789); + const createMock = vi.fn().mockResolvedValue({ + write: vi.fn().mockResolvedValue(undefined), + }); + const fs = createFs({ + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + try { + await service.deleteCloudScript(fs, "delete-uuid", true); + + expect(createMock).toHaveBeenCalledWith("delete-uuid.meta.json", { + modifiedDate: 6789, + createOnly: true, + }); + } finally { + nowSpy.mockRestore(); + } + }); + + it("uses remote meta precondition when writing delete tombstone meta", async () => { + const nowSpy = vi.spyOn(Date, "now").mockReturnValue(6789); + const createMock = vi.fn().mockResolvedValue({ + write: vi.fn().mockResolvedValue(undefined), + }); + const fs = createFs({ + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + 
{} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + try { + await service.deleteCloudScript(fs, "delete-uuid", true, { + script: { + name: "delete-uuid.user.js", + path: "/", + size: 1, + digest: "script-digest", + version: "script-version", + createtime: 1, + updatetime: 1, + }, + meta: { + name: "delete-uuid.meta.json", + path: "/", + size: 1, + digest: "meta-digest", + version: "meta-version", + createtime: 1, + updatetime: 1, + }, + }); + + expect(fs.delete).toHaveBeenCalledWith("delete-uuid.user.js", { + expectedVersion: "script-version", + }); + expect(createMock).toHaveBeenCalledWith("delete-uuid.meta.json", { + modifiedDate: 6789, + expectedVersion: "meta-version", + }); + } finally { + nowSpy.mockRestore(); + } + }); + + it("does not downgrade missing remote script snapshot to unconditional delete", async () => { + const createMock = vi.fn().mockResolvedValue({ + write: vi.fn().mockResolvedValue(undefined), + }); + const fs = createFs({ + create: createMock, + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + await service.deleteCloudScript(fs, "delete-uuid", true, { + meta: { + name: "delete-uuid.meta.json", + path: "/", + size: 1, + digest: "meta-digest", + version: "meta-version", + createtime: 1, + updatetime: 1, + }, + }); + + expect(fs.delete).not.toHaveBeenCalledWith("delete-uuid.user.js", undefined); + expect(createMock).toHaveBeenCalledWith( + "delete-uuid.meta.json", + expect.objectContaining({ expectedVersion: "meta-version" }) + ); + }); + + it("does not downgrade missing remote meta snapshot to unconditional delete", async () => { + const fs = createFs(); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as 
any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + await service.deleteCloudScript(fs, "delete-uuid", false, { + script: { + name: "delete-uuid.user.js", + path: "/", + size: 1, + digest: "script-digest", + version: "script-version", + createtime: 1, + updatetime: 1, + }, + }); + + expect(fs.delete).toHaveBeenCalledTimes(1); + expect(fs.delete).toHaveBeenCalledWith("delete-uuid.user.js", { + expectedVersion: "script-version", + }); + }); + + it("preserves cloud-native digest and does not overwrite with pushed md5", async () => { + // 各后端 digest 格式不一致(webdav/onedrive 是 etag、dropbox 是 content_hash 等), + // 上传后再次 list 已经能拿到原生 digest 时,必须保留它,不能被本地 md5 覆盖, + // 否则下次同步比对会因格式不一致而把未变动的脚本判定为已变动并触发不必要的拉取/推送 + const scriptCode = "// code"; + const script = { + uuid: "push-uuid", + name: "push", + origin: "origin", + downloadUrl: "download-url", + checkUpdateUrl: "check-update-url", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }; + const cloudListAfterPush = [ + { name: "push-uuid.user.js", digest: "etag-user-js", updatetime: 1 }, + { name: "push-uuid.meta.json", digest: "etag-meta-json", updatetime: 1 }, + ]; + const fs = createFs({ + list: vi + .fn() + .mockResolvedValueOnce([]) + .mockResolvedValueOnce(cloudListAfterPush) + .mockResolvedValueOnce(cloudListAfterPush) + .mockResolvedValue(cloudListAfterPush), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: { + get: vi.fn().mockResolvedValue({ code: scriptCode }), + }, + all: vi.fn().mockResolvedValue([script]), + } as any + ); + + await service.syncOnce({ ...syncConfig, syncStatus: false }, fs); + + await expect((service as any).storage.get("file_digest")).resolves.toEqual({ + "push-uuid.user.js": "etag-user-js", + "push-uuid.meta.json": "etag-meta-json", + }); + }); + + it("retries digest list before falling back to pushed md5", async 
() => { + const fs = createFs({ + list: vi + .fn() + .mockResolvedValueOnce([]) + .mockResolvedValueOnce([ + { + name: "push-uuid.user.js", + path: "push-uuid.user.js", + size: 1, + digest: "etag-user-js", + createtime: 1, + updatetime: 1, + }, + ]), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + await service.updateFileDigest(fs, { + "push-uuid.user.js": "local-md5", + }); + + expect(fs.list).toHaveBeenCalledTimes(2); + await expect((service as any).storage.get("file_digest")).resolves.toEqual({ + "push-uuid.user.js": "etag-user-js", + }); + }); + + it("keeps cloud digest when cloud list returns previous digest after overwrite", async () => { + const fs = createFs({ + list: vi.fn().mockResolvedValueOnce([ + { + name: "push-uuid.user.js", + path: "push-uuid.user.js", + size: 1, + digest: "old-md5", + createtime: 1, + updatetime: 1, + }, + ]), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + await service.updateFileDigest(fs, { + "push-uuid.user.js": "new-md5", + }); + + await expect((service as any).storage.get("file_digest")).resolves.toEqual({ + "push-uuid.user.js": "old-md5", + }); + }); + + it("prunes changed tombstone digest entries but keeps entries missing from one list", async () => { + const fs = createFs({ + list: vi.fn().mockResolvedValueOnce([ + { + name: "keep.meta.json", + path: "keep.meta.json", + size: 1, + digest: "same-tombstone-digest", + createtime: 1, + updatetime: 1, + }, + { + name: "changed.meta.json", + path: "changed.meta.json", + size: 1, + digest: "new-meta-digest", + createtime: 1, + updatetime: 1, + }, + ]), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, 
+ {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + await (service as any).storage.set("tombstone_digest", { + "keep.meta.json": "same-tombstone-digest", + "changed.meta.json": "old-tombstone-digest", + "gone.meta.json": "gone-tombstone-digest", + }); + + await service.updateFileDigest(fs); + + await expect((service as any).storage.get("tombstone_digest")).resolves.toEqual({ + "keep.meta.json": "same-tombstone-digest", + "gone.meta.json": "gone-tombstone-digest", + }); + }); + + it("skips status and digest update when a push hits remote conflict", async () => { + const conflict = new FileSystemError({ + provider: "webdav", + message: "Precondition failed", + status: 412, + conflict: true, + }); const fs = createFs({ - list: vi.fn().mockResolvedValueOnce([]).mockResolvedValueOnce(cloudListAfterPush), + list: vi.fn().mockResolvedValueOnce([]), }); const service = new SynchronizeService( {} as any, @@ -585,18 +1561,236 @@ console.log("ok");` {} as any, { scriptCodeDAO: { - get: vi.fn().mockResolvedValue({ code: scriptCode }), + get: vi.fn().mockResolvedValue({ code: "// code" }), }, - all: vi.fn().mockResolvedValue([script]), + all: vi.fn().mockResolvedValue([ + { + uuid: "push-uuid", + name: "push", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }, + ]), } as any ); - await service.syncOnce({ ...syncConfig, syncStatus: false }, fs); + vi.spyOn(service, "pushScript").mockRejectedValue(conflict); + const updateDigestSpy = vi.spyOn(service, "updateFileDigest"); + const notifySpy = vi.spyOn(service, "notifySyncFailed").mockImplementation(() => {}); - await expect((service as any).storage.get("file_digest")).resolves.toEqual({ - "push-uuid.user.js": "etag-user-js", - "push-uuid.meta.json": "etag-meta-json", + await service.syncOnce(syncConfig, fs); + + expect(updateDigestSpy).not.toHaveBeenCalled(); + 
expect(fs.create).not.toHaveBeenCalledWith("scriptcat-sync.json", expect.anything()); + expect(notifySpy).toHaveBeenCalledWith(true, 1); + }); + + it("skips status and digest update when any push task fails", async () => { + const error = new Error("network failed after partial write"); + const fs = createFs({ + list: vi.fn().mockResolvedValueOnce([]), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: { + get: vi.fn().mockResolvedValue({ code: "// code" }), + }, + all: vi.fn().mockResolvedValue([ + { + uuid: "push-uuid", + name: "push", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }, + ]), + } as any + ); + + vi.spyOn(service, "pushScript").mockRejectedValue(error); + const updateDigestSpy = vi.spyOn(service, "updateFileDigest"); + const notifySpy = vi.spyOn(service, "notifySyncFailed").mockImplementation(() => {}); + + await service.syncOnce(syncConfig, fs); + + expect(updateDigestSpy).not.toHaveBeenCalled(); + expect(fs.create).not.toHaveBeenCalledWith("scriptcat-sync.json", expect.anything()); + expect(notifySpy).toHaveBeenCalledWith(false, 1); + }); + + it("skips status and digest update when pullScript fails", async () => { + const error = new Error("install failed during pull"); + const fs = createFs({ + list: vi.fn().mockResolvedValueOnce([ + { + name: "pull-uuid.user.js", + path: "/", + size: 1, + digest: "d1", + createtime: 1, + updatetime: 2, + }, + { + name: "pull-uuid.meta.json", + path: "/", + size: 1, + digest: "d2", + createtime: 1, + updatetime: 2, + }, + ]), + open: vi.fn().mockImplementation(async (file) => ({ + read: vi.fn().mockResolvedValue( + file.name.endsWith(".user.js") + ? 
`// ==UserScript== +// @name Pull Test +// @namespace sync-test +// @match https://example.com/* +// ==/UserScript== +console.log("ok");` + : JSON.stringify({ uuid: "pull-uuid" }) + ), + })), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + { installScript: vi.fn().mockRejectedValue(error) } as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + const updateDigestSpy = vi.spyOn(service, "updateFileDigest"); + const notifySpy = vi.spyOn(service, "notifySyncFailed").mockImplementation(() => {}); + + await service.syncOnce(syncConfig, fs); + + expect(updateDigestSpy).not.toHaveBeenCalled(); + expect(fs.create).not.toHaveBeenCalledWith("scriptcat-sync.json", expect.anything()); + expect(notifySpy).toHaveBeenCalledWith(false, 1); + }); + + it("skips digest update when status sync fails", async () => { + const fs = createFs({ + list: vi.fn().mockResolvedValueOnce([ + { + name: "status-uuid.user.js", + path: "/", + size: 1, + digest: "d1", + createtime: 1, + updatetime: 1, + }, + { + name: "status-uuid.meta.json", + path: "/", + size: 1, + digest: "d2", + createtime: 1, + updatetime: 1, + }, + { + name: "scriptcat-sync.json", + path: "/", + size: 1, + digest: "sync-digest", + createtime: 1, + updatetime: 1, + }, + ]), + open: vi.fn().mockResolvedValue({ + read: vi.fn().mockResolvedValue( + JSON.stringify({ + version: "1.0.0", + status: { + scripts: { + "status-uuid": { + enable: false, + sort: 0, + updatetime: 2, + }, + }, + }, + }) + ), + }), + }); + const service = new SynchronizeService( + {} as any, + {} as any, + { + enableScript: vi.fn().mockRejectedValue(new Error("enable failed")), + } as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + update: vi.fn(), + all: vi.fn().mockResolvedValue([ + { + uuid: "status-uuid", + name: "status", + updatetime: 1, + createtime: 1, + status: 1, + sort: 0, + metadata: {}, + }, + ]), + 
} as any + ); + const notifySpy = vi.spyOn(service, "notifySyncFailed").mockImplementation(() => {}); + const updateDigestSpy = vi.spyOn(service, "updateFileDigest"); + await (service as any).storage.set("file_digest", { + "status-uuid.user.js": "d1", + }); + + await service.syncOnce(syncConfig, fs); + + expect(updateDigestSpy).not.toHaveBeenCalled(); + expect(fs.create).not.toHaveBeenCalledWith("scriptcat-sync.json", expect.anything()); + expect(notifySpy).toHaveBeenCalledWith(false, 1); + }); + + it("deleteCloudScript rejects delete failures so caller can notify", async () => { + const fs = createFs({ + delete: vi.fn().mockRejectedValue(new Error("delete failed")), }); + const service = new SynchronizeService( + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + {} as any, + { + scriptCodeDAO: {}, + all: vi.fn().mockResolvedValue([]), + } as any + ); + + await expect(service.deleteCloudScript(fs, "delete-uuid", true)).rejects.toThrow("delete failed"); }); it("scriptInstall enters cloud_sync queue and updates digest after push", async () => { @@ -695,7 +1889,28 @@ console.log("ok");` }), }); - const deleteFs = createFs(); + const deleteFs = createFs({ + list: vi.fn().mockResolvedValue([ + { + name: "from-user.user.js", + path: "/", + size: 1, + digest: "script-digest", + version: "script-version", + createtime: 1, + updatetime: 1, + }, + { + name: "from-user.meta.json", + path: "/", + size: 1, + digest: "meta-digest", + version: "meta-version", + createtime: 1, + updatetime: 1, + }, + ]), + }); const service = new SynchronizeService( {} as any, {} as any, @@ -713,7 +1928,7 @@ console.log("ok");` ); vi.spyOn(service as any, "buildFileSystem").mockResolvedValue(deleteFs); - vi.spyOn(service, "deleteCloudScript").mockImplementation(async (_fs: any, uuid: string) => { + const deleteSpy = vi.spyOn(service, "deleteCloudScript").mockImplementation(async (_fs: any, uuid: string) => { order.push(`delete:${uuid}`); }); const realUpdateDigest = 
service.updateFileDigest.bind(service); @@ -744,6 +1959,15 @@ console.log("ok");` // deleteBy === "sync" 的不应触发云端删除;并且 digest 必须在删除全部完成后才更新 expect(order).toEqual(["sync:list", "sync:digest", "delete:from-user", "delete:digest"]); + expect(deleteSpy).toHaveBeenCalledWith( + deleteFs, + "from-user", + true, + expect.objectContaining({ + script: expect.objectContaining({ version: "script-version" }), + meta: expect.objectContaining({ version: "meta-version" }), + }) + ); }); it("scriptsDelete skips enqueue when all entries are deleteBy=sync", async () => { diff --git a/src/app/service/service_worker/synchronize.ts b/src/app/service/service_worker/synchronize.ts index 822ecfc01..80dc2dcf3 100644 --- a/src/app/service/service_worker/synchronize.ts +++ b/src/app/service/service_worker/synchronize.ts @@ -10,11 +10,11 @@ import { } from "@App/app/repo/scripts"; import BackupExport from "@App/pkg/backup/export"; import type { BackupData, ResourceBackup, ScriptBackupData, ScriptOptions, ValueStorage } from "@App/pkg/backup/struct"; -import type { FileInfo } from "@Packages/filesystem/filesystem"; +import type { FileCreateOptions, FileInfo } from "@Packages/filesystem/filesystem"; import type FileSystem from "@Packages/filesystem/filesystem"; import ZipFileSystem from "@Packages/filesystem/zip/zip"; import FileSystemFactory, { type FileSystemType } from "@Packages/filesystem/factory"; -import { isWarpTokenError } from "@Packages/filesystem/error"; +import { isConflictError, isWarpTokenError } from "@Packages/filesystem/error"; import type { Group } from "@Packages/message/server"; import type { MessageSend } from "@Packages/message/types"; import { type IMessageQueue } from "@Packages/message/message_queue"; @@ -73,11 +73,76 @@ type FileDigestMap = { }; const SYNC_SERVICE_TASK_KEY = "cloud_sync_queue"; +const FILE_DIGEST_STORAGE_KEY = "file_digest"; +const TOMBSTONE_DIGEST_STORAGE_KEY = "tombstone_digest"; +const SCRIPTCAT_SYNC_FILENAME = "scriptcat-sync.json"; +const 
SCRIPT_FILE_SUFFIX = ".user.js"; +const META_FILE_SUFFIX = ".meta.json"; function getScriptModifiedDate(script: PushScriptParam): number { return script.updatetime || script.createtime || Date.now(); } +function getWriteOptions(modifiedDate: number, remoteFile?: FileInfo): FileCreateOptions { + const opts: FileCreateOptions = { modifiedDate }; + if (!remoteFile) { + // 新文件必须用 createOnly,避免 list 短暂漏文件时把另一台设备刚创建的同名文件覆盖掉。 + opts.createOnly = true; + return opts; + } + // 优先使用 provider 暴露的原生版本 token(etag/rev/version),没有版本时才退到 digest。 + // 部分 provider 的 digest 不是 MD5,调用方不能把 expectedDigest 当成跨 provider 的强一致 CAS。 + if (remoteFile.version) { + opts.expectedVersion = remoteFile.version; + } else if (remoteFile.digest) { + opts.expectedDigest = remoteFile.digest; + } + return opts; +} + +function getDeleteOptions(remoteFile?: FileInfo) { + if (!remoteFile) { + return undefined; + } + // 删除也尽量使用远端快照里的版本 token;这能让 S3/WebDAV/OneDrive 走服务端 If-Match。 + // Baidu/Dropbox/Google Drive 只能做删除前校验,仍然不是原子删除,详见各 provider 注释。 + if (remoteFile.version) { + return { expectedVersion: remoteFile.version }; + } + if (remoteFile.digest) { + return { expectedDigest: remoteFile.digest }; + } + return undefined; +} + +async function readSyncMeta(fs: FileSystem, file: FileInfo): Promise { + const meta = await fs.open(file); + return JSON.parse((await meta.read("string")) as string) as SyncMeta; +} + +function groupFilesByUuid(list: FileInfo[]): Map> { + const uuidMap = new Map>(); + const getOrCreate = (uuid: string) => { + let files = uuidMap.get(uuid); + if (!files) { + files = {}; + uuidMap.set(uuid, files); + } + return files; + }; + + for (const file of list) { + if (file.name.endsWith(SCRIPT_FILE_SUFFIX)) { + const uuid = file.name.slice(0, -SCRIPT_FILE_SUFFIX.length); + getOrCreate(uuid).script = file; + } else if (file.name.endsWith(META_FILE_SUFFIX)) { + const uuid = file.name.slice(0, -META_FILE_SUFFIX.length); + getOrCreate(uuid).meta = file; + } + } + return uuidMap; +} + export class 
SynchronizeService { logger: Logger; @@ -113,18 +178,27 @@ export class SynchronizeService { // 获取脚本备份数据 async getScriptBackupData(uuids?: string[]) { if (uuids) { - const rets: Promise[] = []; - uuids.forEach((uuid) => { - rets.push( - this.scriptDAO.get(uuid).then((script) => { - if (script) { - return this.generateScriptBackupData(script); - } - return Promise.reject(new Error(`Script ${uuid} not found`)); - }) - ); + const results = await Promise.allSettled( + uuids.map(async (uuid) => { + const script = await this.scriptDAO.get(uuid); + if (!script) { + throw new Error(`Script ${uuid} not found`); + } + return this.generateScriptBackupData(script); + }) + ); + const failed = results.filter((ret): ret is PromiseRejectedResult => ret.status === "rejected"); + failed.forEach((ret) => { + this.logger.warn("failed to export selected script", Logger.E(ret.reason)); }); - return Promise.all(rets); // 不处理 Promise.reject ? + if (failed.length) { + // 用户明确选择导出 uuid 时,缺失/失败不能静默跳过; + // 否则会生成不完整备份而用户无感。这里先收集并记录所有失败,再让导出整体失败。 + throw new Error(`Failed to export ${failed.length} selected script(s)`); + } + return results + .filter((ret): ret is PromiseFulfilledResult => ret.status === "fulfilled") + .map((ret) => ret.value); } // 获取所有脚本 const list = await this.scriptDAO.all(); @@ -190,7 +264,6 @@ export class SynchronizeService { } return ret; } - importResources(data: { uuid: string; requires: ResourceBackup[]; @@ -350,34 +423,46 @@ export class SynchronizeService { }); } + public notifySyncFailed(hasConflict: boolean, rejectedCount: number) { + this.logger.warn("skip status and digest update because cloud sync task failed", { + conflict: hasConflict, + failed: rejectedCount, + }); + const title = i18n.t("notification.script_sync_failed"); + const message = hasConflict + ? 
i18n.t("notification.script_sync_conflict_desc") + : i18n.t("notification.script_sync_failed_desc"); + InfoNotification(title, message); + } + private async syncOnceInternal(syncConfig: CloudSyncConfig, fs: FileSystem) { this.logger.info("start sync once"); // 获取文件列表 const list = await fs.list(); // 根据文件名生成一个map - const uuidMap = new Map>(); + const uuidMap = groupFilesByUuid(list); // 储存文件摘要,用于检测文件是否有变化 - const fileDigestMap = ((await this.storage.get("file_digest")) as FileDigestMap) || {}; - - for (const file of list) { - if (file.name.endsWith(".user.js")) { - const uuid = file.name.substring(0, file.name.length - 8); - let files = uuidMap.get(uuid); - if (!files) { - files = {}; - uuidMap.set(uuid, files); - } - files.script = file; - } else if (file.name.endsWith(".meta.json")) { - const uuid = file.name.substring(0, file.name.length - 10); - let files = uuidMap.get(uuid); - if (!files) { - files = {}; - uuidMap.set(uuid, files); - } - files.meta = file; + const fileDigestMap = ((await this.storage.get(FILE_DIGEST_STORAGE_KEY)) as FileDigestMap) || {}; + const tombstoneDigestMap = ((await this.storage.get(TOMBSTONE_DIGEST_STORAGE_KEY)) as FileDigestMap) || {}; + let tombstoneDigestDirty = false; + const rememberTombstoneDigest = (metaFile: FileInfo) => { + if (!metaFile.digest || tombstoneDigestMap[metaFile.name] === metaFile.digest) { + return; } - } + // 记录“已确认是 tombstone 的 meta digest”。以后即使 provider 的 mtime 精度导致 + // meta/script 时间相等,也能继续识别并清理残留 .user.js;正常非 tombstone 不会多读远端 meta。 + tombstoneDigestMap[metaFile.name] = metaFile.digest; + tombstoneDigestDirty = true; + }; + const forgetTombstoneDigest = (metaFile: FileInfo) => { + if (!tombstoneDigestMap[metaFile.name]) { + return; + } + // 如果同名 meta 已确认不是 tombstone,旧的 tombstone 记录必须清掉; + // 否则后续每轮都会因为缓存命中而额外读取 meta。 + delete tombstoneDigestMap[metaFile.name]; + tombstoneDigestDirty = true; + }; // 获取脚本列表 const scriptList = await this.scriptDAO.all(); @@ -388,18 +473,29 @@ export class SynchronizeService { 
}); // 判断文件系统是否有脚本猫同步文件 - const file = list.find((file) => file.name === "scriptcat-sync.json"); - const scriptcatSync = { + const syncStatusFile = list.find((file) => file.name === SCRIPTCAT_SYNC_FILENAME); + let scriptcatSync = { version: ExtVersion, status: { scripts: {}, }, } as ScriptcatSync; let cloudStatus: ScriptcatSync["status"]["scripts"] = {}; - if (file) { + if (syncStatusFile) { // 如果有,则读取文件内容 - const cloudScriptCatSync = JSON.parse(await fs.open(file).then((f) => f.read("string"))) as ScriptcatSync; - cloudStatus = cloudScriptCatSync.status.scripts; + const cloudScriptCatSync = JSON.parse( + await fs.open(syncStatusFile).then((f) => f.read("string")) + ) as ScriptcatSync; + cloudStatus = cloudScriptCatSync.status?.scripts || {}; + // 保留云端 manifest 的未知字段,避免未来扩展字段被本机同步覆盖掉。 + scriptcatSync = { + ...cloudScriptCatSync, + version: ExtVersion, + status: { + ...cloudScriptCatSync.status, + scripts: {}, + }, + }; } // 对比脚本列表和文件列表,进行同步 @@ -410,19 +506,20 @@ export class SynchronizeService { const skippedOrphanUuids = new Set(); // 需要是同步操作,后续上传剩下的脚本 // 最后使用 Promise.allSettled 进行等待 - uuidMap.forEach((file, uuid) => { + for (const [uuid, remoteFiles] of uuidMap) { const script = scriptMap.get(uuid); if (script) { scriptMap.delete(uuid); // 脚本存在但是文件不存在,则读取.meta.json内容判断是否需要删除脚本 - if (!file.script) { + if (!remoteFiles.script) { result.push( (async () => { // 读取meta文件 - const meta = await fs.open(file.meta!); + const meta = await fs.open(remoteFiles.meta!); const metaJson = (await meta.read("string")) as string; const metaObj = JSON.parse(metaJson) as SyncMeta; if (metaObj.isDeleted) { + rememberTombstoneDigest(remoteFiles.meta!); // 删除脚本 await this.script.deleteScript(script.uuid, "sync"); InfoNotification( @@ -432,46 +529,83 @@ export class SynchronizeService { }) ); } else { + forgetTombstoneDigest(remoteFiles.meta!); // 否则认为是一个无效的.meta文件,进行删除,并进行同步 - await fs.delete(file.meta!.name); + await fs.delete(remoteFiles.meta!.name, getDeleteOptions(remoteFiles.meta)); 
return await this.pushScript(fs, script); } })() ); - return; + continue; + } + const remoteScript = remoteFiles.script; + const remoteMeta = remoteFiles.meta; + let checkedMetaObj: SyncMeta | undefined; + const scriptDigestUnchanged = fileDigestMap[remoteScript.name] === remoteScript.digest; + const metaDigestUnchanged = !remoteMeta || fileDigestMap[remoteMeta.name] === remoteMeta.digest; + const shouldCheckMetaTombstone = + remoteMeta && + (tombstoneDigestMap[remoteMeta.name] === remoteMeta.digest || + fileDigestMap[remoteMeta.name] !== remoteMeta.digest || + // 兼容没有 tombstone_digest 记录的旧版本/异常中断状态:digest cache 已经记录 tombstone meta, + // 但 .user.js 仍没删掉。meta 晚于 script 是删除标记的典型形态,才额外读一次 meta。 + // 这是启发式兜底,不作为严格协议;严格收敛依赖上面的 tombstone digest 记录。 + (scriptDigestUnchanged && metaDigestUnchanged && remoteMeta.updatetime > remoteScript.updatetime)); + if (remoteMeta && shouldCheckMetaTombstone) { + // tombstone 是删除提交信号,优先级高于 .user.js。 + // 如果上次删除在“写 tombstone 后、删 script 前”失败,下一轮会看到 script + tombstone。 + // 这里必须先处理 tombstone,不能因为 script digest 没变而跳过,否则删除可能长期无法收敛。 + checkedMetaObj = await readSyncMeta(fs, remoteMeta); + if (checkedMetaObj.isDeleted) { + rememberTombstoneDigest(remoteMeta); + result.push( + (async () => { + await this.script.deleteScript(script.uuid, "sync"); + await fs.delete(remoteScript.name, getDeleteOptions(remoteScript)); + InfoNotification( + i18n.t("notification.script_sync_delete"), + i18n.t("notification.script_sync_delete_desc", { + scriptName: i18nName(script), + }) + ); + })() + ); + continue; + } + forgetTombstoneDigest(remoteMeta); } // 过滤掉无变动的文件 - if (fileDigestMap[file.script!.name] === file.script!.digest) { - return; + if (scriptDigestUnchanged) { + continue; } const updatetime = script.updatetime || script.createtime; // 对比脚本更新时间和文件更新时间 - if (updatetime > file.script!.updatetime || !file.meta) { + if (updatetime > remoteFiles.script!.updatetime || !remoteFiles.meta) { // 如果脚本更新时间大于文件更新时间 // 或者不存在.meta文件,则上传文件 - 
result.push(this.pushScript(fs, script)); + result.push(this.pushScript(fs, script, remoteFiles)); } else { // 如果脚本更新时间小于文件更新时间,则更新脚本 updateScript.set(uuid, true); - result.push(this.pullScript(fs, file as SyncFiles, cloudStatus[uuid], script)); + result.push(this.pullScript(fs, remoteFiles as SyncFiles, cloudStatus[uuid], script, checkedMetaObj)); } - return; + continue; } // 如果脚本不存在,但文件存在,则安装脚本 - if (file.script) { - if (!file.meta) { + if (remoteFiles.script) { + if (!remoteFiles.meta) { // .meta 文件可能尚未上传完成,跳过本次以避免误删云端脚本 this.logger.warn("skip orphan cloud script without meta", { uuid, - file: file.script.name, + file: remoteFiles.script.name, }); skippedOrphanUuids.add(uuid); - return; + continue; } updateScript.set(uuid, true); - result.push(this.pullScript(fs, file as SyncFiles, cloudStatus[uuid])); + result.push(this.pullScript(fs, remoteFiles as SyncFiles, cloudStatus[uuid])); } - }); + } // 上传剩下的脚本 scriptMap.forEach((script) => { result.push(this.pushScript(fs, script)); @@ -484,11 +618,26 @@ export class SynchronizeService { Object.assign(pushedFileDigestMap, ret.value); } }); + if (tombstoneDigestDirty) { + // 本轮可能同时读到多个 meta,统一写一次本地 cache,避免旧记录较多时频繁 storage.set。 + // 即使后续同步任务失败也可以写入:这是“某个 meta digest 已确认是 tombstone”的辅助事实, + // 不会推进 file_digest 或 scriptcat-sync.json 成功状态,只帮助下一轮继续收敛残留删除。 + await this.storage.set(TOMBSTONE_DIGEST_STORAGE_KEY, tombstoneDigestMap); + } + const rejected = syncResults.filter((ret) => ret.status === "rejected"); + if (rejected.length) { + const hasConflict = rejected.some((ret) => isConflictError(ret.reason)); + rejected.forEach((ret, idx) => { + this.logger.warn(`sync task #${idx} failed`, Logger.E(ret.reason)); + }); + this.notifySyncFailed(hasConflict, rejected.length); + return; + } // 同步状态 if (syncConfig.syncStatus) { - const scriptlist = await this.scriptDAO.all(); - await Promise.allSettled( - scriptlist.map(async (script) => { + const latestScriptList = await this.scriptDAO.all(); + const statusResults = await 
Promise.allSettled( + latestScriptList.map(async (script) => { // 判断云端状态是否与本地状态一致 const status = cloudStatus[script.uuid]; const updatetime = script.updatetime || script.createtime; @@ -533,6 +682,11 @@ export class SynchronizeService { } }) ); + const rejectedStatus = statusResults.filter((ret) => ret.status === "rejected"); + if (rejectedStatus.length) { + this.notifySyncFailed(false, rejectedStatus.length); + return; + } // 保留被跳过的 orphan uuid 的云端 status,避免覆盖另一台设备半上传的状态 skippedOrphanUuids.forEach((uuid) => { const status = cloudStatus[uuid]; @@ -542,9 +696,15 @@ export class SynchronizeService { }); // 保存脚本猫同步状态 const modifiedDate = Date.now(); - const syncFile = await fs.create("scriptcat-sync.json", { modifiedDate }); - await syncFile.write(JSON.stringify(scriptcatSync, null, 2)); - this.logger.info("sync scriptcat-sync.json file success"); + try { + const syncFile = await fs.create(SCRIPTCAT_SYNC_FILENAME, getWriteOptions(modifiedDate, syncStatusFile)); + await syncFile.write(JSON.stringify(scriptcatSync, null, 2)); + this.logger.info("sync scriptcat-sync.json file success"); + } catch (e) { + this.logger.error("sync scriptcat-sync.json file error", Logger.E(e)); + this.notifySyncFailed(isConflictError(e), 1); + return; + } } // 重新获取文件列表,保存文件摘要 this.logger.info("update file digest"); @@ -554,11 +714,40 @@ export class SynchronizeService { } async updateFileDigest(fs: FileSystem, knownFileDigestMap: FileDigestMap = {}) { - const newList = await fs.list(); - const newFileDigestMap: FileDigestMap = {}; + let newList = await fs.list(); + // 有些远端在刚上传后 list 会短暂漏掉新对象;只在“文件名完全没出现”时重试一次。 + // 如果文件名出现但 digest 还是旧值,仍保留 provider 返回值,避免用本地 MD5 污染 etag/rev/hash。 + // 这个取舍可能导致下一轮重复同步或误判变更,但不会把 provider 原生 digest 缓存成错误格式。 + if (Object.keys(knownFileDigestMap).some((name) => !newList.some((file) => file.name === name))) { + newList = await fs.list(); + } + const listedFileDigestMap: FileDigestMap = {}; for (const file of newList) { - newFileDigestMap[file.name] = file.digest; 
+ listedFileDigestMap[file.name] = file.digest; } + const tombstoneDigestMap = ((await this.storage.get(TOMBSTONE_DIGEST_STORAGE_KEY)) as FileDigestMap) || {}; + if (Object.keys(tombstoneDigestMap).length) { + let changed = false; + const nextTombstoneDigestMap: FileDigestMap = {}; + for (const name in tombstoneDigestMap) { + if (listedFileDigestMap[name] === tombstoneDigestMap[name]) { + nextTombstoneDigestMap[name] = tombstoneDigestMap[name]; + } else if (!(name in listedFileDigestMap)) { + // list 在部分后端可能短暂漏文件。不要因为一次没看到 meta 就丢掉 tombstone cache, + // 否则残留 .user.js 的收敛会退回到 mtime 启发式。 + nextTombstoneDigestMap[name] = tombstoneDigestMap[name]; + } else { + changed = true; + } + } + if (changed) { + // tombstone 标记只用于“已确认删除 meta”的收敛加速。 + // 只有同名 meta 仍在但 digest 已变化时才清理;meta 暂时没出现在 list 时先保留, + // 避免最终一致性/缓存导致下一轮丢失 tombstone 收敛信号。 + await this.storage.set(TOMBSTONE_DIGEST_STORAGE_KEY, nextTombstoneDigestMap); + } + } + const newFileDigestMap: FileDigestMap = { ...listedFileDigestMap }; // 各后端 digest 格式不一(WebDAV/OneDrive/S3 是 etag、Dropbox 是 content_hash、Zip 为空, // 仅 GoogleDrive/Baidu 是 md5),只在云端列表暂时漏掉刚上传的文件时用本地 md5 兜底, // 不能覆盖 fs.list 已返回的原生 digest,否则下次同步比对会因格式不一致而误判 @@ -567,23 +756,30 @@ export class SynchronizeService { newFileDigestMap[name] = knownFileDigestMap[name]; } } - await this.storage.set("file_digest", newFileDigestMap); + await this.storage.set(FILE_DIGEST_STORAGE_KEY, newFileDigestMap); return; } // 删除云端脚本数据 - async deleteCloudScript(fs: FileSystem, uuid: string, syncDelete: boolean) { + async deleteCloudScript(fs: FileSystem, uuid: string, syncDelete: boolean, remoteFiles?: Partial) { const filename = `${uuid}.user.js`; const logger = this.logger.with({ uuid: uuid, file: filename, }); try { - await fs.delete(filename); + // 只有调用方没有远端快照,或快照明确看到 script 时才删除。 + // 如果快照存在但没看到文件,跳过删除,避免最终一致性/list 缓存漏文件时退化成无条件删除。 + if (!remoteFiles || remoteFiles.script) { + await fs.delete(filename, getDeleteOptions(remoteFiles?.script)); + } if (syncDelete) { - // 
留下一个.meta.json删除标记 + // 删除协议仍以 .meta.json tombstone 作为对其他设备的提交信号。 + // 注意:当前不是事务写入。script 已删但 tombstone 写失败时,上层会报错且不推进 digest, + // 但远端仍可能短暂处于半提交状态;彻底解决需要 manifest/commit 协议。 + // 不在这里补偿恢复 script:恢复也是一次写入,可能覆盖另一台设备在失败窗口内的新版本。 const modifiedDate = Date.now(); - const meta = await fs.create(`${uuid}.meta.json`, { modifiedDate }); + const meta = await fs.create(`${uuid}.meta.json`, getWriteOptions(modifiedDate, remoteFiles?.meta)); await meta.write( JSON.stringify({ uuid: uuid, @@ -595,17 +791,21 @@ export class SynchronizeService { ); } else { // 直接删除所有相关文件 - await fs.delete(`${uuid}.meta.json`); + // 同 script 删除一样,快照存在但没看到 meta 时不做无条件删除。 + if (!remoteFiles || remoteFiles.meta) { + await fs.delete(`${uuid}.meta.json`, getDeleteOptions(remoteFiles?.meta)); + } } logger.info("delete success"); } catch (e) { logger.error("delete file error", Logger.E(e)); + throw e; } return; } // 上传脚本 - async pushScript(fs: FileSystem, script: PushScriptParam): Promise { + async pushScript(fs: FileSystem, script: PushScriptParam, remoteFiles?: Partial): Promise { const filename = `${script.uuid}.user.js`; const metaFilename = `${script.uuid}.meta.json`; const logger = this.logger.with({ @@ -615,22 +815,39 @@ export class SynchronizeService { }); try { const modifiedDate = getScriptModifiedDate(script); - const w = await fs.create(filename, { modifiedDate }); // 获取脚本代码 const code = await this.scriptCodeDAO.get(script.uuid); const scriptCode = code!.code; - await w.write(scriptCode); - const meta = await fs.create(metaFilename, { modifiedDate }); const metaJson = JSON.stringify({ uuid: script.uuid, origin: script.origin, downloadUrl: script.downloadUrl, checkUpdateUrl: script.checkUpdateUrl, }); - await meta.write(metaJson); + const scriptDigest = md5OfText(scriptCode); + let scriptWritten = false; + + try { + const w = await fs.create(filename, getWriteOptions(modifiedDate, remoteFiles?.script)); + await w.write(scriptCode); + scriptWritten = true; + const meta = await 
fs.create(metaFilename, getWriteOptions(modifiedDate, remoteFiles?.meta)); + await meta.write(metaJson); + } catch (e) { + if (scriptWritten && !remoteFiles?.script) { + // 只清理“本次新建 script 成功但 meta 写失败”的孤儿文件,且必须带 digest 守卫。 + // 这个 digest 是本地 MD5,部分 provider 的远端 digest/etag 不同,清理可能失败; + // 清理失败只会留下 orphan,下次同步会跳过 orphan,不应为了清理而改成无条件删除。 + // 这里不影响正常删除操作:cleanup 只发生在 push 失败路径,失败也会保留原始错误继续上抛。 + await fs.delete(filename, { expectedDigest: scriptDigest }).catch((cleanupError) => { + logger.warn("cleanup newly created script after meta write failure failed", Logger.E(cleanupError)); + }); + } + throw e; + } logger.info("push script success"); return { - [filename]: md5OfText(scriptCode), + [filename]: scriptDigest, [metaFilename]: md5OfText(metaJson), }; } catch (e) { @@ -639,20 +856,32 @@ export class SynchronizeService { } } - async pullScript(fs: FileSystem, file: SyncFiles, status: ScriptcatSyncStatus | undefined, existingScript?: Script) { + async pullScript( + fs: FileSystem, + file: SyncFiles, + status: ScriptcatSyncStatus | undefined, + existingScript?: Script, + knownMetaObj?: SyncMeta + ) { const logger = this.logger.with({ uuid: existingScript?.uuid || "", name: existingScript?.name || "", file: file.script.name, }); try { - // 读取代码文件 + // 先读 meta。tombstone 是删除提交信号,命中后不需要、也不应该依赖 .user.js 仍可读取。 + const metaObj = knownMetaObj || (await readSyncMeta(fs, file.meta)); + if (metaObj.isDeleted) { + if (existingScript) { + await this.script.deleteScript(existingScript.uuid, "sync"); + } + await fs.delete(file.script.name, getDeleteOptions(file.script)); + logger.info("pull tombstone delete success"); + return; + } + // 只有确认不是 tombstone 后才读取脚本内容,避免删除路径被残留/已删除的 .user.js 阻塞。 const r = await fs.open(file.script); const code = (await r.read("string")) as string; - // 读取meta文件 - const meta = await fs.open(file.meta); - const metaJson = (await meta.read("string")) as string; - const metaObj = JSON.parse(metaJson) as SyncMeta; const { script } = await prepareScriptByCode( 
code, existingScript?.downloadUrl || metaObj.downloadUrl || "", @@ -680,6 +909,7 @@ export class SynchronizeService { logger.info("pull script success"); } catch (e) { logger.error("pull script error", Logger.E(e)); + throw e; } } @@ -732,10 +962,17 @@ if (config.enable) { stackAsyncTask(SYNC_SERVICE_TASK_KEY, async () => { const fs = await this.buildFileSystem(config); - const pushedFileDigestMap = await this.pushScript(fs, params.script); + const list = await fs.list(); + const uuid = params.script.uuid; + const remoteFiles: Partial<SyncFiles> = { + script: list.find((file) => file.name === `${uuid}.user.js`), + meta: list.find((file) => file.name === `${uuid}.meta.json`), + }; + const pushedFileDigestMap = await this.pushScript(fs, params.script, remoteFiles); await this.updateFileDigest(fs, pushedFileDigestMap); }).catch((e) => { this.logger.error("push script on install error", Logger.E(e)); + this.notifySyncFailed(isConflictError(e), 1); }); } } @@ -752,12 +989,17 @@ if (config.enable) { stackAsyncTask(SYNC_SERVICE_TASK_KEY, async () => { const fs = await this.buildFileSystem(config); + const list = await fs.list(); for (const { uuid } of items) { - await this.deleteCloudScript(fs, uuid, config.syncDelete); + await this.deleteCloudScript(fs, uuid, config.syncDelete, { + script: list.find((file) => file.name === `${uuid}.user.js`), + meta: list.find((file) => file.name === `${uuid}.meta.json`), + }); } await this.updateFileDigest(fs); }).catch((e) => { this.logger.error("delete cloud script error", Logger.E(e)); + this.notifySyncFailed(isConflictError(e), 1); }); } } diff --git a/src/locales/de-DE/translation.json b/src/locales/de-DE/translation.json index c05e9bce8..daafc2df2 100644 --- a/src/locales/de-DE/translation.json +++ b/src/locales/de-DE/translation.json @@ -569,6 +569,9 @@ "notification": { "script_sync_delete": "Skript-Löschsynchronisation", "script_sync_delete_desc": "Skript {{scriptName}} 
wurde gelöscht", + "script_sync_failed": "Skriptsynchronisierung fehlgeschlagen", + "script_sync_failed_desc": "Einige Cloud-Synchronisierungsänderungen sind fehlgeschlagen. Der lokale Synchronisierungsstatus wurde nicht aktualisiert; bitte prüfen Sie die Protokolle und synchronisieren Sie erneut.", + "script_sync_conflict_desc": "Die Cloud-Synchronisierung hat einen Remote-Konflikt erkannt. Der lokale Synchronisierungsstatus wurde nicht aktualisiert; bitte prüfen Sie Änderungen auf anderen Geräten und synchronisieren Sie erneut.", "subscribe_update": "Abonnement {{subscribeName}} wurde aktualisiert", "subscribe_update_desc": "Neue Skripte: {{newScripts}}\nGelöschte Skripte: {{deletedScripts}}" }, diff --git a/src/locales/en-US/translation.json b/src/locales/en-US/translation.json index f67746b49..f6bb21ec4 100644 --- a/src/locales/en-US/translation.json +++ b/src/locales/en-US/translation.json @@ -569,6 +569,9 @@ "notification": { "script_sync_delete": "Script Sync Delete", "script_sync_delete_desc": "Script {{scriptName}} has been deleted", + "script_sync_failed": "Script Sync Failed", + "script_sync_failed_desc": "Some cloud sync changes failed. Local sync state was not updated; please check the logs and try syncing again.", + "script_sync_conflict_desc": "Cloud sync detected a remote conflict. 
Local sync state was not updated; please review changes on other devices and sync again.", "subscribe_update": "Subscribe {{subscribeName}} has been updated", "subscribe_update_desc": "New scripts: {{newScripts}}\nDeleted scripts: {{deletedScripts}}" }, diff --git a/src/locales/ja-JP/translation.json b/src/locales/ja-JP/translation.json index 462991dab..77671fcf2 100644 --- a/src/locales/ja-JP/translation.json +++ b/src/locales/ja-JP/translation.json @@ -569,6 +569,9 @@ "notification": { "script_sync_delete": "スクリプト同期削除", "script_sync_delete_desc": "スクリプト {{scriptName}} が削除されました", + "script_sync_failed": "スクリプト同期に失敗しました", + "script_sync_failed_desc": "一部のクラウド同期の変更に失敗しました。ローカルの同期状態は更新されていません。ログを確認してから再同期してください。", + "script_sync_conflict_desc": "クラウド同期でリモートの競合が検出されました。ローカルの同期状態は更新されていません。他のデバイスでの変更を確認してから再同期してください。", "subscribe_update": "サブスクリプション {{subscribeName}} が更新されました", "subscribe_update_desc": "新しいスクリプト: {{newScripts}}\n削除されたスクリプト: {{deletedScripts}}" }, diff --git a/src/locales/ru-RU/translation.json b/src/locales/ru-RU/translation.json index 131d5e421..764b8415d 100644 --- a/src/locales/ru-RU/translation.json +++ b/src/locales/ru-RU/translation.json @@ -569,6 +569,9 @@ "notification": { "script_sync_delete": "Синхронизация удаления скрипта", "script_sync_delete_desc": "Скрипт {{scriptName}} был удален", + "script_sync_failed": "Не удалось синхронизировать скрипт", + "script_sync_failed_desc": "Не удалось применить часть изменений облачной синхронизации. Локальное состояние синхронизации не обновлено; проверьте журналы и повторите синхронизацию.", + "script_sync_conflict_desc": "Облачная синхронизация обнаружила конфликт на удаленной стороне. 
Локальное состояние синхронизации не обновлено; проверьте изменения на других устройствах и повторите синхронизацию.", "subscribe_update": "Подписка {{subscribeName}} была обновлена", "subscribe_update_desc": "Новые скрипты: {{newScripts}}\nУдаленные скрипты: {{deletedScripts}}" }, diff --git a/src/locales/vi-VN/translation.json b/src/locales/vi-VN/translation.json index 558153a1f..a090770f5 100644 --- a/src/locales/vi-VN/translation.json +++ b/src/locales/vi-VN/translation.json @@ -569,6 +569,9 @@ "notification": { "script_sync_delete": "Đồng bộ xóa script", "script_sync_delete_desc": "Script {{scriptName}} đã bị xóa", + "script_sync_failed": "Đồng bộ script thất bại", + "script_sync_failed_desc": "Một số thay đổi đồng bộ đám mây thất bại. Trạng thái đồng bộ cục bộ chưa được cập nhật; vui lòng kiểm tra nhật ký rồi đồng bộ lại.", + "script_sync_conflict_desc": "Đồng bộ đám mây phát hiện xung đột từ xa. Trạng thái đồng bộ cục bộ chưa được cập nhật; vui lòng kiểm tra thay đổi trên các thiết bị khác rồi đồng bộ lại.", "subscribe_update": "Đăng ký {{subscribeName}} đã được cập nhật", "subscribe_update_desc": "Script mới: {{newScripts}}\nScript đã xóa: {{deletedScripts}}" }, diff --git a/src/locales/zh-CN/translation.json b/src/locales/zh-CN/translation.json index f0f2998c0..ddf04d0b8 100644 --- a/src/locales/zh-CN/translation.json +++ b/src/locales/zh-CN/translation.json @@ -569,6 +569,9 @@ "notification": { "script_sync_delete": "脚本删除同步", "script_sync_delete_desc": "脚本 {{scriptName}} 已被删除", + "script_sync_failed": "脚本同步失败", + "script_sync_failed_desc": "部分云同步变更失败,本地同步状态未更新,请检查日志后重新同步。", + "script_sync_conflict_desc": "云同步检测到远端冲突,本地同步状态未更新,请检查其他设备上的变更后重新同步。", "subscribe_update": "订阅 {{subscribeName}} 已更新", "subscribe_update_desc": "新增脚本:{{newScripts}}\n删除脚本:{{deletedScripts}}" }, diff --git a/src/locales/zh-TW/translation.json b/src/locales/zh-TW/translation.json index 36c20530e..1a1c23ad3 100644 --- a/src/locales/zh-TW/translation.json +++ 
b/src/locales/zh-TW/translation.json @@ -569,6 +569,9 @@ "notification": { "script_sync_delete": "腳本刪除同步", "script_sync_delete_desc": "腳本 {{scriptName}} 已被刪除", + "script_sync_failed": "腳本同步失敗", + "script_sync_failed_desc": "部分雲端同步變更失敗,本機同步狀態未更新,請檢查日誌後重新同步。", + "script_sync_conflict_desc": "雲端同步偵測到遠端衝突,本機同步狀態未更新,請檢查其他裝置上的變更後重新同步。", "subscribe_update": "訂閱 {{subscribeName}} 已更新", "subscribe_update_desc": "新增腳本:{{newScripts}}\n刪除腳本:{{deletedScripts}}" },