From 236dcd893064baeebecca0aeb70f5cabd475aff5 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Tue, 17 Mar 2026 21:48:21 +0800 Subject: [PATCH 01/21] feat: add Cloudflare Workers deployment for Media Kit Migrate Media Kit from Blocklet Server (Express + SQLite + local disk) to Cloudflare Workers (Hono + D1 + R2), enabling edge deployment with zero server maintenance. Backend (cloudflare/src/): - Hono API server with full upload/folder/serve/status routes - Presigned URL upload flow with multipart support - R2 file storage with MD5 content dedup - D1 database with Drizzle ORM - AIGNE Hub proxy for AI Image generation - SPA fallback for client-side routing Frontend (cloudflare/frontend/): - Reuses original blocklets/image-bin/src/ via Vite aliases - Shim layer replaces @blocklet/* and @arcblock/* SDK dependencies - window.blocklet injected in index.html for module-level access - path-browserify polyfill for browser compatibility Shared code changes (backward compatible): - uploader: support both URL and base64 for AI Image results - uploader: add presigned-upload Uppy plugin - uploader: add .catch() for AI Image URL fetch Deployed at: https://media-kit.yexiaofang.workers.dev TODO: - Replace x-user-did auth with CF auth SDK (shijun) - Restrict CORS origin after auth is ready - Production: use S3 CopyObject instead of R2 binding get+put for large files Co-Authored-By: Claude Opus 4.6 (1M context) --- cloudflare/.gitignore | 16 + cloudflare/README.md | 221 ++ cloudflare/docs/CHANGES.md | 266 ++ cloudflare/docs/MIGRATION-PLAN.md | 1319 ++++++++ cloudflare/docs/MIGRATION-REPORT.md | 277 ++ cloudflare/drizzle.config.ts | 7 + cloudflare/frontend/index.html | 48 + cloudflare/frontend/package.json | 67 + cloudflare/frontend/src/App.tsx | 143 + cloudflare/frontend/src/api.ts | 14 + .../frontend/src/components/FolderList.tsx | 89 + .../frontend/src/components/ImageGrid.tsx | 128 + .../frontend/src/components/UploadButton.tsx | 67 + 
cloudflare/frontend/src/main.tsx | 7 + .../frontend/src/plugins/presigned-upload.ts | 127 + cloudflare/frontend/src/shims/arcblock-did.ts | 8 + cloudflare/frontend/src/shims/arcblock-ux.tsx | 184 ++ .../frontend/src/shims/blocklet-js-sdk.ts | 11 + .../src/shims/blocklet-ui-react-dashboard.tsx | 36 + .../src/shims/blocklet-ui-react-footer.tsx | 14 + .../src/shims/blocklet-ui-react-header.tsx | 20 + .../frontend/src/shims/blocklet-ui-react.tsx | 18 + .../src/shims/did-connect-react-button.tsx | 13 + .../src/shims/did-connect-react-session.tsx | 42 + cloudflare/frontend/src/shims/ux-button.tsx | 1 + cloudflare/frontend/src/shims/ux-center.tsx | 1 + cloudflare/frontend/src/shims/ux-config.tsx | 1 + cloudflare/frontend/src/shims/ux-dialog.tsx | 1 + cloudflare/frontend/src/shims/ux-empty.tsx | 1 + .../frontend/src/shims/ux-locale-context.tsx | 1 + cloudflare/frontend/src/shims/ux-result.tsx | 1 + .../frontend/src/shims/ux-split-button.tsx | 1 + cloudflare/frontend/src/shims/ux-toast.tsx | 1 + .../frontend/src/shims/ux-with-tracker.tsx | 1 + cloudflare/frontend/tsconfig.json | 16 + cloudflare/frontend/vite.config.ts | 72 + cloudflare/migrations/0001_initial.sql | 58 + cloudflare/package.json | 33 + cloudflare/pnpm-lock.yaml | 2812 +++++++++++++++++ cloudflare/scripts/migrate-data.ts | 140 + cloudflare/src/__tests__/utils.test.ts | 84 + .../src/__tests__/worker.integration.ts | 90 + cloudflare/src/db/schema.ts | 56 + cloudflare/src/middleware/auth.ts | 32 + cloudflare/src/routes/cleanup.ts | 49 + cloudflare/src/routes/folders.ts | 77 + cloudflare/src/routes/serve.ts | 68 + cloudflare/src/routes/status.ts | 29 + cloudflare/src/routes/unsplash.ts | 115 + cloudflare/src/routes/upload.ts | 725 +++++ cloudflare/src/types.ts | 84 + cloudflare/src/utils/hash.ts | 128 + cloudflare/src/utils/s3.ts | 173 + cloudflare/src/worker.ts | 118 + cloudflare/tsconfig.json | 23 + cloudflare/vitest.config.ts | 7 + cloudflare/wrangler.toml | 45 + .../ai-image/show-panel/output/index.tsx | 6 +- 
.../src/react/plugins/presigned-upload.ts | 291 ++ packages/uploader/src/react/uploader.tsx | 335 +- pnpm-lock.yaml | 377 ++- 61 files changed, 9054 insertions(+), 141 deletions(-) create mode 100644 cloudflare/.gitignore create mode 100644 cloudflare/README.md create mode 100644 cloudflare/docs/CHANGES.md create mode 100644 cloudflare/docs/MIGRATION-PLAN.md create mode 100644 cloudflare/docs/MIGRATION-REPORT.md create mode 100644 cloudflare/drizzle.config.ts create mode 100644 cloudflare/frontend/index.html create mode 100644 cloudflare/frontend/package.json create mode 100644 cloudflare/frontend/src/App.tsx create mode 100644 cloudflare/frontend/src/api.ts create mode 100644 cloudflare/frontend/src/components/FolderList.tsx create mode 100644 cloudflare/frontend/src/components/ImageGrid.tsx create mode 100644 cloudflare/frontend/src/components/UploadButton.tsx create mode 100644 cloudflare/frontend/src/main.tsx create mode 100644 cloudflare/frontend/src/plugins/presigned-upload.ts create mode 100644 cloudflare/frontend/src/shims/arcblock-did.ts create mode 100644 cloudflare/frontend/src/shims/arcblock-ux.tsx create mode 100644 cloudflare/frontend/src/shims/blocklet-js-sdk.ts create mode 100644 cloudflare/frontend/src/shims/blocklet-ui-react-dashboard.tsx create mode 100644 cloudflare/frontend/src/shims/blocklet-ui-react-footer.tsx create mode 100644 cloudflare/frontend/src/shims/blocklet-ui-react-header.tsx create mode 100644 cloudflare/frontend/src/shims/blocklet-ui-react.tsx create mode 100644 cloudflare/frontend/src/shims/did-connect-react-button.tsx create mode 100644 cloudflare/frontend/src/shims/did-connect-react-session.tsx create mode 100644 cloudflare/frontend/src/shims/ux-button.tsx create mode 100644 cloudflare/frontend/src/shims/ux-center.tsx create mode 100644 cloudflare/frontend/src/shims/ux-config.tsx create mode 100644 cloudflare/frontend/src/shims/ux-dialog.tsx create mode 100644 cloudflare/frontend/src/shims/ux-empty.tsx create mode 100644 
cloudflare/frontend/src/shims/ux-locale-context.tsx create mode 100644 cloudflare/frontend/src/shims/ux-result.tsx create mode 100644 cloudflare/frontend/src/shims/ux-split-button.tsx create mode 100644 cloudflare/frontend/src/shims/ux-toast.tsx create mode 100644 cloudflare/frontend/src/shims/ux-with-tracker.tsx create mode 100644 cloudflare/frontend/tsconfig.json create mode 100644 cloudflare/frontend/vite.config.ts create mode 100644 cloudflare/migrations/0001_initial.sql create mode 100644 cloudflare/package.json create mode 100644 cloudflare/pnpm-lock.yaml create mode 100644 cloudflare/scripts/migrate-data.ts create mode 100644 cloudflare/src/__tests__/utils.test.ts create mode 100644 cloudflare/src/__tests__/worker.integration.ts create mode 100644 cloudflare/src/db/schema.ts create mode 100644 cloudflare/src/middleware/auth.ts create mode 100644 cloudflare/src/routes/cleanup.ts create mode 100644 cloudflare/src/routes/folders.ts create mode 100644 cloudflare/src/routes/serve.ts create mode 100644 cloudflare/src/routes/status.ts create mode 100644 cloudflare/src/routes/unsplash.ts create mode 100644 cloudflare/src/routes/upload.ts create mode 100644 cloudflare/src/types.ts create mode 100644 cloudflare/src/utils/hash.ts create mode 100644 cloudflare/src/utils/s3.ts create mode 100644 cloudflare/src/worker.ts create mode 100644 cloudflare/tsconfig.json create mode 100644 cloudflare/vitest.config.ts create mode 100644 cloudflare/wrangler.toml create mode 100644 packages/uploader/src/react/plugins/presigned-upload.ts diff --git a/cloudflare/.gitignore b/cloudflare/.gitignore new file mode 100644 index 0000000..b12db96 --- /dev/null +++ b/cloudflare/.gitignore @@ -0,0 +1,16 @@ +# Dependencies +node_modules/ + +# Build output +public/ + +# Wrangler +.wrangler/ +.dev.vars + +# Frontend +frontend/node_modules/ +frontend/node_modules/.vite/ + +# OS +.DS_Store diff --git a/cloudflare/README.md b/cloudflare/README.md new file mode 100644 index 0000000..21704d1 --- 
/dev/null +++ b/cloudflare/README.md @@ -0,0 +1,221 @@ +# Media Kit — Cloudflare Workers 部署指南 + +## 前置条件 + +- Node.js >= 18 +- pnpm +- Cloudflare 账号(免费即可) +- Wrangler CLI:`npm install -g wrangler` + +## 一次性初始化(首次部署) + +### 1. 登录 Cloudflare + +```bash +wrangler login +``` + +### 2. 创建 D1 数据库 + +```bash +wrangler d1 create media-kit-db +``` + +输出中的 `database_id` 填入 `wrangler.toml` 第 20 行。 + +### 3. 创建 R2 Bucket + +先在 Dashboard 激活 R2 服务(R2 → 激活),然后: + +```bash +wrangler r2 bucket create media-kit-uploads +``` + +### 4. 配置 R2 CORS + +Dashboard → R2 → media-kit-uploads → 设置 → CORS 策略,添加: + +```json +[ + { + "AllowedOrigins": ["https://your-domain.com"], + "AllowedMethods": ["GET", "PUT", "HEAD"], + "AllowedHeaders": ["Content-Type"], + "MaxAgeSeconds": 86400 + } +] +``` + +`AllowedOrigins` 填实际部署的域名。 + +### 5. 生成 R2 API Token + +Dashboard → R2 → 管理 R2 API 令牌 → 创建 API 令牌: +- 权限:对象读和写 +- Bucket:media-kit-uploads +- 记录 `Access Key ID` 和 `Secret Access Key` + +### 6. 配置 Secrets + +```bash +cd cloudflare + +# R2 凭证 +wrangler secret put R2_ACCESS_KEY_ID +wrangler secret put R2_SECRET_ACCESS_KEY + +# Cloudflare Account ID(Dashboard 右侧边栏可见) +wrangler secret put CF_ACCOUNT_ID + +# AIGNE Hub API Key(AI Image 功能需要) +wrangler secret put AIGNE_HUB_API_KEY + +# 可选:Unsplash API +wrangler secret put UNSPLASH_KEY +wrangler secret put UNSPLASH_SECRET +``` + +### 7. 应用数据库迁移 + +```bash +wrangler d1 migrations apply media-kit-db --remote +``` + +## 部署 + +```bash +cd cloudflare + +# 构建前端 + 部署 Worker +npm run deploy +``` + +等价于: +```bash +cd frontend && npx vite build # 构建前端到 public/ +cd .. && wrangler deploy # 部署 Worker + 静态资源 +``` + +部署后访问 `https://media-kit.<你的账号子域>.workers.dev` + +## 数据迁移(从 Blocklet Server 迁移) + +如果需要从现有 Blocklet Server 迁移数据: + +### 1. 迁移文件(本地磁盘/S3 → R2) + +```bash +# 使用 rclone 同步文件 +rclone sync /path/to/blocklet/uploads r2:media-kit-uploads/ + +# 如果源是 S3 +rclone sync s3:old-bucket r2:media-kit-uploads/ +``` + +### 2. 
迁移数据库(SQLite → D1) + +```bash +cd cloudflare + +# 先试运行看数据量 +npx tsx scripts/migrate-data.ts --source /path/to/media-kit.db --dry-run + +# 正式迁移 +npx tsx scripts/migrate-data.ts --source /path/to/media-kit.db --d1-name media-kit-db +``` + +### 3. 切换 DNS + +确认迁移数据完整后,将域名 DNS 指向 Cloudflare Workers。 + +## CI/CD 集成 + +GitHub Actions 示例: + +```yaml +name: Deploy Media Kit +on: + push: + branches: [main] + paths: ['cloudflare/**'] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 20 + - run: npm install -g pnpm + - run: cd cloudflare/frontend && pnpm install && npx vite build + - run: cd cloudflare && npx wrangler deploy + env: + CLOUDFLARE_API_TOKEN: ${{ secrets.CF_API_TOKEN }} +``` + +在 GitHub repo Settings → Secrets 里添加 `CF_API_TOKEN`(Dashboard → My Profile → API Tokens → Create Token)。 + +## 本地开发 + +```bash +cd cloudflare + +# 创建 .dev.vars 文件 +cat > .dev.vars << 'EOF' +ENVIRONMENT=development +R2_ACCESS_KEY_ID=your-key +R2_SECRET_ACCESS_KEY=your-secret +CF_ACCOUNT_ID=your-account-id +AIGNE_HUB_API_KEY=your-aigne-key +EOF + +# 初始化本地数据库 +npm run db:migrate:local + +# 启动 Worker(终端 1) +npm run dev + +# 启动前端开发服务器(终端 2) +cd frontend && npx vite --port 3030 +``` + +## 架构说明 + +``` +cloudflare/ + src/ # CF Worker 后端(Hono + D1 + R2) + worker.ts # 入口:路由 + AIGNE Hub 代理 + routes/ + upload.ts # 上传:presign / proxy-put / direct / confirm + serve.ts # 文件服务:R2 → 响应(生产用 cf.image) + folders.ts # 文件夹 CRUD + status.ts # Uploader 配置 + unsplash.ts # Unsplash 代理 + cleanup.ts # 定时清理过期 session + middleware/auth.ts # x-user-did 认证 + db/schema.ts # Drizzle ORM 表定义 + utils/ + s3.ts # R2 S3 兼容 API(presigned URL、multipart) + hash.ts # MD5 哈希、MIME 检测、SVG 净化 + frontend/ # 前端构建配置 + vite.config.ts # Alias 指向原版源码 + shim + src/shims/ # Blocklet SDK 替代实现 + index.html # window.blocklet 注入 + public/ # vite build 产物(Worker 静态资源) + wrangler.toml # CF Workers 配置 + migrations/ # D1 数据库迁移 + scripts/migrate-data.ts # SQLite → 
D1 迁移脚本 +``` + +前端源码复用 `blocklets/image-bin/src/`,通过 Vite alias 将 `@blocklet/*` 和 `@arcblock/*` 依赖替换为 `frontend/src/shims/` 中的轻量实现。 + +## 环境差异 + +| 特性 | 本地开发 | 线上 | +|------|---------|------| +| R2 存储 | miniflare 本地模拟 | 真实 R2 | +| D1 数据库 | 本地 SQLite | 真实 D1 | +| Presigned URL | proxy-put 代理(避免 CORS) | 直传 R2(需配 CORS) | +| 文件服务 | R2 binding 直接读取 | cf.image EXIF 剥离 + 自动 WebP | +| 图片生成 | 代理到 hub.aigne.io | 代理到 hub.aigne.io | diff --git a/cloudflare/docs/CHANGES.md b/cloudflare/docs/CHANGES.md new file mode 100644 index 0000000..bc8d3e4 --- /dev/null +++ b/cloudflare/docs/CHANGES.md @@ -0,0 +1,266 @@ +# Media Kit — Cloudflare Workers 迁移改动总结 + +## 一、背景 + +### 项目现状 + +Media Kit 是 ArcBlock 的媒体文件管理组件,核心包括: + +- **Blocklet 后端** (`blocklets/image-bin/`):Node.js + Express + Sequelize + 本地存储,使用 TUS 协议处理文件上传 +- **前端上传组件** (`packages/uploader/`):基于 Uppy 的 `` React 组件,被多个业务方(消费者)通过 `onUploadFinish={(result) => doSomething(result.data)}` 使用 +- **Cloudflare Worker** (`cloudflare/`):一个独立的 CF Worker 实现(Hono + D1 + R2),使用 presigned URL 上传协议 + +### 问题 + +1. **前端只支持 TUS**:`@blocklet/uploader` 的上传逻辑硬编码了 TUS 协议(`@uppy/tus` 插件),无法对接 CF Worker 的 presigned URL 上传 +2. **CF Worker 有残留依赖**:之前尝试过 adapter pattern 方案(引入 `@blocklet/media-kit-core` 共享包),后来方案废弃,但 CF Worker 代码中仍残留了对 `@blocklet/media-kit-core` 的依赖和 adapters 目录,而该包已不存在,导致 CF Worker 无法编译 +3. 
**响应格式不一致**:CF Worker 的 confirm 端点返回 `{ id, created_at }` 等字段,而 Blocklet 端返回 `{ _id, createdAt }` 格式,消费者代码依赖后者 + +### 已废弃的方案 + +之前考虑过 adapter pattern(引入 `@blocklet/media-kit-core` 包,定义 `IDatabaseAdapter` / `IStorageAdapter` / `IConfigAdapter` 接口,让 Blocklet 和 CF Worker 共享 ~145 行业务逻辑)。该方案因过度抽象被废弃:共享逻辑量太少,不值得引入新包和接口层。 + +### 新方案原则 + +- **Blocklet 代码不动**:零改动,零风险 +- **CF Worker 保持独立实现**:不引入共享包,直接内联 Drizzle 查询 +- **前端最小改动**:只加 presigned 模式的条件分支 +- **消费者零感知**:`result.data` 结构在两种模式下完全一致 + +--- + +## 二、改动清单 + +### 文件总览 + +| 文件 | 操作 | 行数 | 说明 | +|------|------|------|------| +| `cloudflare/src/adapters/` | **DELETE** | -4 files | 删除废弃的 adapter pattern 残留 | +| `cloudflare/package.json` | MODIFY | -1 行 | 移除 `@blocklet/media-kit-core` 依赖 | +| `cloudflare/src/types.ts` | MODIFY | ~10 行 | ConfirmResponse 字段对齐 | +| `cloudflare/src/routes/upload.ts` | **REWRITE** | 587 行 | 内联 list/delete/update + 修复 confirm 格式 + 修复 R2 流处理 bug | +| `cloudflare/src/routes/status.ts` | **REWRITE** | 29 行 | 内联 + 加 `uploadMode: 'presigned'` | +| `cloudflare/src/routes/folders.ts` | **REWRITE** | 77 行 | 内联 + 加 GET 端点 | +| `packages/uploader/src/react/plugins/presigned-upload.ts` | **CREATE** | 291 行 | Uppy 自定义上传插件 | +| `packages/uploader/src/react/uploader.tsx` | MODIFY | +39 行 | 条件分支:TUS / presigned | + +### 未改动的文件 + +| 文件 | 原因 | +|------|------| +| `blocklets/image-bin/` | 完全不动,Blocklet 上传逻辑不受影响 | +| `packages/media-kit-core/` | 不创建此包(方案已废弃) | +| `pnpm-workspace.yaml` | cloudflare 是独立部署,不需加入 workspace | + +--- + +## 三、详细改动说明 + +### 3.1 清理 CF Worker adapter 残留 + +**为什么**:之前 adapter pattern 方案废弃后,`cloudflare/src/adapters/` 目录和 `@blocklet/media-kit-core` 依赖残留在代码里。`media-kit-core` 包已被删除,导致 CF Worker 无法编译。 + +**做了什么**: +- 删除 `cloudflare/src/adapters/` 整个目录(`index.ts`、`database.ts`、`storage.ts`、`config.ts`) +- 从 `cloudflare/package.json` 移除 `"@blocklet/media-kit-core": "workspace:^"` 依赖 +- 所有路由文件移除对 adapters 和 media-kit-core 的 import + +### 3.2 CF Worker 路由内联重写 + +**为什么**:原来的 `GET /uploads`、`DELETE 
/uploads/:id`、`PUT /uploads/:id` 通过 adapter 调用 `media-kit-core` 的 handler 函数。包删除后需要用内联 Drizzle 查询替代。 + +#### upload.ts — 内联 list/delete/update + +**GET /uploads**(列表): +- 直接查 D1 `uploads` 表,支持分页 (`page`/`pageSize`) +- 权限控制:admin 可看所有,member 只能看自己的 +- 支持 `folderId` 和 `tag` 过滤 +- 查询 `uploadTags` 表获取每个文件的 tags +- 返回格式使用 `_id` 字段名(与 Blocklet Sequelize 记录一致) + +**DELETE /uploads/:id**(删除): +- Admin only(通过 `isAdminMiddleware`) +- 引用计数:同一个 `filename` 可能被多条记录引用(dedup),只有最后一条引用删除时才删 R2 文件 +- 同时删除关联的 `uploadTags` 记录 + +**PUT /uploads/:id**(移动到文件夹): +- Admin only +- 更新 `folderId` 和 `updatedAt` +- 返回更新后的完整记录 + +#### status.ts — 内联 + uploadMode + +**GET /uploader/status**: +- 从环境变量读取配置(`ALLOWED_FILE_TYPES`、`MAX_UPLOAD_SIZE`、`UNSPLASH_KEY`、`USE_AI_IMAGE`) +- 返回 `restrictions`、`availablePluginMap` +- **新增 `uploadMode: 'presigned'`** — 前端据此选择上传协议 + +#### folders.ts — 内联 + 新增 GET + +**POST /folders**(创建): +- Admin only,幂等(同名 folder 返回已有记录) +- 返回 `_id` 格式 + +**GET /folders**(列表,新增): +- 返回所有 folders,按创建时间倒序 + +### 3.3 CF Worker 响应格式对齐 + +**为什么**:消费者代码依赖 `result.data._id`、`result.data.createdAt` 等字段。CF Worker 之前返回 `id`、`created_at`,与 Blocklet 的 Sequelize 记录不一致。 + +**做了什么**: + +`ConfirmResponse` 类型从: +```typescript +{ id, filename, originalname, mimetype, size, url, created_at, hashFileName } +``` +改为: +```typescript +{ _id, filename, originalname, mimetype, size, url, createdAt, createdBy, tags } +``` + +confirm 端点的两个分支(dedup shortcut 和正常 confirm)都按新格式返回。list 和 update 端点也统一使用 `_id` 字段名。 + +### 3.4 修复 R2 流处理 bug(confirm 流程) + +**为什么**:原始代码(迁移前就存在)在 confirm 流程中有 R2 body 流处理缺陷: + +1. **double GET**:第 224 行做完整 GET(拿到 body stream),第 230 行又做 range GET(取 4KB header)。两次网络 I/O 读同一对象,浪费资源 +2. **SVG 流耗尽**:对 SVG 文件调用 `r2Object.text()` 消耗了整个 body stream,之后 `streamMD5(r2Object.body)` 读到空流,MD5 结果错误 +3. 
**size 比较误判**:content dedup 时用 `existingObject.size === session.totalSize` 判断是否需要 copy,但这不是内容相等的充分条件 + +**做了什么**: +- 分离 range GET(仅用于 MIME 检测)和 full GET(用于 SVG/hash) +- SVG 处理后始终重新 GET 获取 fresh stream 做 MD5 +- content dedup 简化:MD5 key 匹配即视为内容相同,不再比较 size + +### 3.5 创建 PresignedUpload Uppy 插件 + +**为什么**:CF Worker 不支持 TUS 协议(TUS 需要有状态的服务端 session,CF Workers 是无状态的)。CF Worker 使用 presigned URL 协议:客户端获取签名 URL 后直传文件到 R2,再调用 confirm 端点确认。需要一个 Uppy 自定义插件实现此协议。 + +**文件**:`packages/uploader/src/react/plugins/presigned-upload.ts`(291 行) + +**实现细节**: + +继承 `@uppy/core` 的 `BasePlugin`,在 `install()` 中通过 `addUploader()` 注册上传函数。 + +**上传流程**: + +``` +1. POST /uploads/check → 按 size+ext 去重检查 + ↓ exists=true → 跳到 4(clone) + ↓ exists=false → 继续 + +2. POST /uploads/presign → 获取 presigned URL + ↓ multipart=false → 单次直传 + ↓ multipart=true → 分片上传 + +3a. PUT presignedUrl → XHR 直传(带进度上报) +3b. 分片上传: + for each part: + POST /uploads/multipart/part-url → 获取分片 URL + PUT partUrl → 上传分片 + POST /uploads/multipart/complete → 组装分片 + +4. POST /uploads/confirm → 确认上传,获取 upload record +``` + +**关键设计**: + +- **进度上报**:直传使用 XHR `upload.onprogress`,分片使用累计已上传字节 +- **错误处理**:单个文件失败 emit `upload-error`,不阻塞其他文件。分片上传失败时自动调用 `POST /uploads/multipart/abort` 清理 R2 未完成的 multipart session +- **事件兼容**:完成后 emit `upload-success` 事件,携带 `body: confirmData`,由 `uploader.tsx` 的监听器统一调用 `_onUploadFinish` 和 `emitUploadSuccess`,与 TUS 流程行为一致 +- **bind 安全**:构造函数中一次性 bind `handleUpload`,`install`/`uninstall` 使用同一引用,避免 removeUploader 泄漏 + +### 3.6 修改 uploader.tsx — 条件分支 + +**为什么**:需要根据后端返回的 `uploadMode` 选择上传协议,同时保持 TUS 逻辑完全不变。 + +**改动点**(共 +39 行新增): + +1. **state 新增 `uploadMode`**: + ```typescript + uploadMode: 'tus' as 'tus' | 'presigned' + ``` + +2. **useRequest 中读取 uploadMode**: + ```typescript + state.uploadMode = data.uploadMode || 'tus'; + ``` + 从 `GET /api/uploader/status` 响应中获取。fallback 为 `'tus'`,所以 Blocklet 即使不返回此字段也完全兼容。 + +3. 
**initUploader 条件分支**: + ```typescript + if (uploadMode === 'presigned') { + currentUppy.use(PresignedUploadPlugin, { apiBase }); + currentUppy.on('upload-success', async (file, response) => { + // 构造与 TUS 一致的 result,调用 _onUploadFinish + }); + } else { + currentUppy.use(Tus, { ... }); // 原有逻辑,一行不改 + } + ``` + +4. **useEffect 依赖加 uploadMode**:确保 `uploadMode` 从 `'tus'` 变为 `'presigned'` 后 uppy 实例会重新初始化。 + +--- + +## 四、消费者兼容性 + +消费者代码: +```jsx + { + console.log(result.data._id); // ✅ 两种模式下都有 + console.log(result.data.url); // ✅ 两种模式下都有 + console.log(result.data.filename); // ✅ 两种模式下都有 +}} /> +``` + +`result.data` 在两种模式下结构一致: + +| 字段 | TUS (Blocklet) | Presigned (CF Worker) | +|------|----------------|----------------------| +| `_id` | Sequelize UUID | D1 UUID | +| `url` | `/uploads/hash.ext` | `/uploads/hash.ext` | +| `filename` | `hash.ext` | `hash.ext` | +| `originalname` | 原始文件名 | 原始文件名 | +| `mimetype` | MIME type | MIME type | +| `size` | 文件大小 | 文件大小 | +| `createdAt` | ISO 时间戳 | ISO 时间戳 | +| `createdBy` | user DID | user DID | +| `tags` | string[] | string[] | + +**消费者零改动,零感知。** + +--- + +## 五、验证结果 + +| 检查项 | 结果 | +|--------|------| +| CF Worker TypeScript 编译 | ✅ 通过(仅 `cloudflare:test` 预存 error) | +| CF Worker 单元测试(14 tests) | ✅ 全部通过 | +| Uploader 包构建(unbuild) | ✅ 通过,ESM/CJS 均生成 | +| Presigned plugin 编译输出 | ✅ `lib/` 和 `es/` 均包含 | +| 残留引用检查 | ✅ 无任何 `@blocklet/media-kit-core` 或 `adapters` 引用 | +| Blocklet 代码 | ✅ 未触碰任何文件 | + +--- + +## 六、架构图 + +``` +消费者代码(零改动) + └── + │ + ├── uploadMode === 'tus'(Blocklet 默认) + │ └── Uppy + @uppy/tus + │ └── TUS 协议 → Blocklet Express 后端 → 本地存储 + │ + └── uploadMode === 'presigned'(CF Worker) + └── Uppy + PresignedUploadPlugin + ├── POST /uploads/check → D1 去重 + ├── POST /uploads/presign → 获取签名 URL + ├── PUT presignedUrl → 直传 R2 + └── POST /uploads/confirm → D1 记录 + 返回 result.data +``` diff --git a/cloudflare/docs/MIGRATION-PLAN.md b/cloudflare/docs/MIGRATION-PLAN.md new file mode 100644 index 0000000..fc7a4c1 --- /dev/null +++ 
b/cloudflare/docs/MIGRATION-PLAN.md @@ -0,0 +1,1319 @@ +# Media Kit Cloudflare Migration Plan v6 + +> Integrated from Media Kit Owner + Cloudflare Expert + Cross-Model Review (Claude Opus 4.6 + GPT-5.4). +> Date: 2026-03-16 | Version: 6 + +--- + +## Changelog from v5 + +| # | Change | Source | Description | +|---|--------|--------|-------------| +| V6-1 | Auth ensureAdmin 改为配置式 | Cross-review P1 | role=member 与 ensureAdmin 矛盾,改为环境变量 ADMIN_DIDS 配置管理员 | +| V6-2 | Dedup check 改为 size+ext 粗筛 | Cross-review P1 | first-5MB hash 与 full-file hash filename 不匹配,改为按 size+ext 粗筛候选 | +| V6-3 | cf.image origin 改为 Worker subrequest | Cross-review P1 | cf.image 经 Cloudflare 图片代理,不转发自定义 header,WAF 方案不可行 | +| V6-4 | 补完前端响应契约 | Cross-review P1 | confirm/check/presign 的响应 schema 明确定义 | +| V6-5 | 补充 folders 数据迁移 | Cross-review P2 | 迁移脚本遗漏 folders 表 | +| V6-6 | 补充 tags 查询 API | Cross-review P2 | GET /api/uploads 缺少 tag 过滤参数 | +| V6-7 | SVG 清洗改用 Workers 兼容方案 | Claude 发现 | DOMPurify 依赖 DOM API,Workers 中不可用 | +| V6-8 | ListParts XML 解析改用标准解析器 | Claude 发现 | 正则解析 XML 脆弱,改用 Workers 内置 HTMLRewriter 或分步解析 | +| V6-9 | 大文件 confirm 超时处理 | Cross-review P2 | >500MB 文件 hash 可能超 30s CPU 限制 | + +--- + +## Changelog from v4 (preserved) + +| # | Change | Source | Description | +|---|--------|--------|-------------| +| V5-1 | 认证简化为默认放行 | 用户确认 | 先默认一个 DID 可上传,Shijun 过几天给出认证方案后再对接 | +| V5-2 | AFS 推迟到下个版本 | 用户确认 | 先完成 Cloudflare 迁移,AFS 集成放到 v2 | +| V5-3 | 废弃 Blocklet SDK API | 用户确认 | Group B (SDK Upload) 和所有 blocklet sdk 相关接口全部移除 | +| V5-4 | 移除 Service Binding 认证 | 随 V5-1 | Service Binding 通信保留,但认证逻辑待后续 | + +--- + +## Changelog from v3 (preserved) + +| # | Change | Source | Description | +|---|--------|--------|-------------| +| V4-1 | 移除认证设计 | Shijun 反馈 | 认证部分由 Shijun 封装的工具统一处理 | +| V4-2 | 新增 AFS 集成 | Shijun 反馈 | 设计 media-kit 作为 AIGNE AFS 存储后端(已推迟到 v2) | +| V4-3 | 移除 nonces 表 | 随 V4-1 | 不再需要 HMAC 防重放 | +| V4-4 | 简化 wrangler.toml | 随 V4-1 | 移除认证相关密钥 | + +--- + +## Changelog from v2 (preserved) + +| # | Issue 
(from Round 2 Cross-Review) | Severity | Source | Fix in v3 | +|---|----------------------------------|----------|--------|-----------| +| R2-1 | Service Binding `x-service-binding` header 可被外部伪造 | P1 | 双模型确认 | 改为 shared secret 验证 + 删除公网可触及的 header 信任 | +| R2-2 | confirm 步骤 streamMD5 全量加载到内存,100MB 文件接近 128MB 限制 | P1 | 双模型确认 | 改用 js-md5 增量哈希 (O(1) 内存) | +| R2-3 | HMAC nonce 防重放仍是 TODO,未实现 | P1 | Codex 发现 | 实现 KV nonce 存储 + 5 分钟 TTL | +| R2-4 | 服务端文件校验缺失(SVG 窗口期 + mimetype 信任客户端) | P1 | Codex 发现 | confirm 步骤增加 mimetype 校验 + SVG sanitize 在 promote 前完成 | +| R2-5 | `R2.copy()` API 不存在 | P1 | 双模型确认 | 改为 S3 CopyObject via aws4fetch | +| R2-6 | `crypto.subtle.digest('MD5')` Workers 不支持 | P2 | Claude 发现 | 改用 js-md5 增量哈希 | +| R2-7 | 前端 multipart 代码缺少 `/multipart/complete` 调用 | P2 | Codex 发现 | 修复前端流程:parts → complete → confirm | +| R2-8 | Dedup check (first 5MB MD5) 存在碰撞误判风险 | P2 | Codex 发现 | Dedup 降级为 hint,confirm 阶段用全文件 hash 最终确认 | +| R2-9 | `/api/url/import` 无 SSRF 防护 | P2 | Codex 发现 | 加 host denylist + size limit + redirect cap | +| R2-10 | R2 Workers API 无 listParts 方法 | P2 | Claude 发现 | 改用 S3 ListParts via aws4fetch | +| R2-11 | Transform Rule 不能直接触发 Image Resizing | P2 | Claude 发现 | 改为 Worker 内 `cf.image` 统一处理 | + +### Fixes from Round 3 (included in v3) + +| # | Issue (from Round 3) | Severity | Source | Fix | +|---|---------------------|----------|--------|-----| +| R3-1 | Service Binding 示例代码仍用旧 header | P2 | Codex | 更新示例使用 x-sb-secret | +| R3-2 | KV nonce 最终一致性可被跨区域并发绕过 | P1 | Codex | 改用 D1 nonces 表(单主写入,原子唯一性) | +| R3-3 | Mimetype 校验是客户端自引用 | P1 | Codex | 改为 magic-byte 内容嗅探(前 4KB) | +| R3-4 | 前端 dedup 仍然跳过 confirm | P2 | Codex | 即使 dedup 命中也调用 confirm 创建用户记录 | +| R3-5 | 公开 R2 域名绕过 EXIF 移除 | P1 | Codex | R2 bucket 私有 + WAF 限制 origin 访问 | +| R3-6 | /multipart/complete 与 /confirm 职责矛盾 | P2 | Codex | 明确 complete 仅组装对象,confirm 统一做校验/promote | +| R3-7 | Dedup 路径不删除 tempKey | P3 | Codex | 两条路径都删除 tempKey | + +### v1 → v2 Changelog (preserved) + +| # | Issue (from Codex 
R1) | Severity | Fix in v2 | +|---|----------------------|----------|-----------| +| 1 | R2 multipart 前端直传方案不完整 | P1 | 重新设计三层上传协议,补完 S3 presigned multipart 全流程 | +| 2 | 断点续传能力丢失未补偿 | P1 | 设计 resumable multipart 协议(客户端持久化 + listParts) | +| 3 | 95MB 直传阈值过高,内存溢出 | P1 | 小文件改为 R2 presigned PUT 直传(绕过 Worker 内存);Worker 仅做签名 | +| 4 | 客户端传入 hash 作 key,覆盖风险 | P1 | 改为 temp key 上传 → 服务端校验 → promote 流程 | +| 5 | HMAC 认证消费 body + 重放 + 权限过大 | P1 | 改为 canonical request 签名(不读 body),加 nonce,scoped 权限 | +| 6 | EXIF 策略回退隐私承诺 | P1 | 强制所有图片经过 Image Resizing,确保 EXIF 始终移除 | +| 7 | Unsplash 重新托管违反 ToS | P1 | 改为 hotlink + attribution 模式,仅存 metadata | +| 8 | tags JSON 全表扫描 | P2 | 新增 upload_tags 关联表 | +| 9 | D1 全球访问模型未设计 | P2 | 补充 primary location + read replication 策略 | +| 10 | 成本估算不准 | P2 | 三档成本模型(保守/中位/峰值) | +| 11 | 文档内在不一致 | P2 | 修复 check API、迁移脚本参数化、status 响应兼容 | +| 12 | Service Binding 可平替 component.call | P2 | 补充 Service Binding 架构 | + +--- + +## 1. Executive Summary + +将 Media Kit (image-bin) 从 ArcBlock Blocklet Server 迁移到 Cloudflare Workers + R2 + D1。 + +**Scope Change Declaration**: 迁移后,断点续传能力从 TUS 的字节级断点改为 R2 multipart 的分片级断点(最小粒度 5MB part),刷新页面后可恢复未完成的分片上传。这是可接受的产品范围变更。 + +**核心技术栈变更**: + +| 层 | 现有 | 迁移后 | +|---|---|---| +| 运行时 | Express.js + Node.js | Hono + Cloudflare Workers | +| 文件存储 | 本地磁盘 | Cloudflare R2 | +| 数据库 | SQLite + Sequelize | Cloudflare D1 + Drizzle ORM | +| 上传协议 | TUS 断点续传 (10MB chunks) | R2 presigned PUT + R2 S3 multipart | +| 图片处理 | 无 | Cloudflare Image Resizing (Worker `cf.image`) | +| CDN | CDN_HOST URL 替换 | Cloudflare CDN(原生) | +| 认证 | DID Wallet + 组件签名 | 默认放行(预留 DID),待 Shijun 提供认证方案 | +| 组件间调用 | blocklet component.call | Cloudflare Service Binding(保留通信,认证待定) | +| 前端 | Vite + React (Blocklet) | Vite + React (Cloudflare Pages) | + +--- + +## 2. 
Feature Inventory & Migration Impact + +### 2.1 API Endpoints + +#### Group A: Core Upload Management + +| Method | Path | Auth | Function | Impact | +|--------|------|------|----------|--------| +| GET | `/uploads/:filename` | Optional (referer) | 文件静态服务 + Image Resizing + EXIF strip | 🔄 Adapt | +| POST | `/api/uploads/presign` | user + auth | 获取 R2 presigned PUT URL(小文件)或创建 multipart session(大文件) | 🔧 New | +| POST | `/api/uploads/confirm` | user + auth | 上传完成确认,服务端校验 + promote + 写 D1 | 🔧 New | +| POST | `/api/uploads/multipart/part-url` | user + auth | 获取单个 part 的 presigned PUT URL | 🔧 New | +| POST | `/api/uploads/multipart/complete` | user + auth | 完成 multipart 上传 | 🔧 New | +| POST | `/api/uploads/multipart/abort` | user + auth | 中止 multipart 上传 | 🔧 New | +| GET | `/api/uploads/multipart/status` | user + auth | 查询 multipart 上传进度(已完成 parts) | 🔧 New | +| POST | `/api/uploads/check` | user + auth | 文件去重检查(仅返回当前用户范围) | 🔧 New | +| GET | `/api/uploads` | user + auth | 分页列出上传文件(支持 ?tag= 过滤) | 🔄 Adapt | +| DELETE | `/api/uploads/:id` | user + isAdmin | 删除(引用计数) | 🔄 Adapt | +| PUT | `/api/uploads/:id` | user + isAdmin | 移动到 folder | 🔄 Adapt | + +#### ~~Group B: SDK Upload~~ (v5 废弃) + +> Blocklet SDK 相关的 API 全部废弃,不迁移。 + +#### Group B: Supporting Features + +| Method | Path | Auth | Function | Impact | +|--------|------|------|----------|--------| +| POST | `/api/folders` | user + isAdmin | 创建文件夹 | 🔄 Adapt | +| POST | `/api/image/generations` | user + auth | AI 图片生成 | 🔄 Adapt | +| GET | `/api/image/models` | None | AI 模型列表 | 🔄 Adapt | +| GET | `/api/uploader/status` | None | 上传器配置(兼容现有前端响应 schema) | 🔄 Adapt | +| GET | `/api/unsplash/search` | user + auth | Unsplash 搜索(hotlink 模式) | 🔧 New | +| POST | `/api/unsplash/track-download` | user + auth | 触发 Unsplash download tracking | 🔧 New | +| POST | `/api/url/import` | user + auth | 从 URL 导入文件 | 🔧 New | + +#### Group C: Drop + +| Path | Reason | +|------|--------| +| `/api/resources`, `/api/resources/export` | Blocklet imgpack 
体系 | +| `/proxy-to-uploads/*` | Blocklet 内部代理 | +| `/api/sdk/uploads`, `/api/sdk/uploads/find` | Blocklet SDK 废弃 | + +#### Group D: Service Binding (替代 component.call) + +其他 Cloudflare Workers 通过 Service Binding 直接调用 media-kit Worker: + +```toml +# 其他 Worker 的 wrangler.toml +[[services]] +binding = "MEDIA_KIT" +service = "media-kit" +``` + +```typescript +// 调用方(零网络延迟) +// 认证方案待 Shijun 确定后对接 +const res = await env.MEDIA_KIT.fetch( + new Request('https://media-kit.internal/api/uploads', { + method: 'POST', + body: formData, + headers: { + 'x-caller-id': 'my-worker-name', + }, + }) +); +``` + +### 2.2 Data Model + +**uploads 表**(不变): + +| Field | Type | Notes | +|-------|------|-------| +| id | TEXT PK | UUID | +| filename | TEXT | MD5 hash + ext | +| originalname | TEXT | 原始文件名 | +| mimetype | TEXT | MIME 类型 | +| size | INTEGER | 字节大小 | +| remark | TEXT | 备注 | +| folder_id | TEXT | 所属文件夹 | +| created_at | TEXT | ISO 时间 | +| updated_at | TEXT | ISO 时间 | +| created_by | TEXT | 创建者 | +| updated_by | TEXT | 更新者 | + +**upload_tags 表**(新增,替代 JSON tags 字段): + +| Field | Type | Notes | +|-------|------|-------| +| upload_id | TEXT FK | 关联 uploads.id | +| tag | TEXT | 标签值 | +| PK | (upload_id, tag) | 复合主键 | + +索引:`(tag, upload_id)` — 支持按 tag 高效查询 + +**folders 表**(不变) + +**upload_sessions 表**(新增,管理 multipart 上传状态): + +| Field | Type | Notes | +|-------|------|-------| +| id | TEXT PK | UUID | +| upload_id | TEXT | R2 multipart uploadId | +| key | TEXT | R2 object key (temp key) | +| final_key | TEXT | 最终 key (content hash) | +| total_size | INTEGER | 预期总大小 | +| part_size | INTEGER | 每 part 大小 | +| status | TEXT | 'active' / 'completed' / 'aborted' | +| created_by | TEXT | 创建者 | +| created_at | TEXT | 创建时间 | +| expires_at | TEXT | 过期时间(默认 24h) | + +--- + +## 3. 
Architecture + +``` + ┌────────────────────────────┐ + │ Cloudflare CDN │ + │ (caches Worker responses) │ + └──────────┬─────────────────┘ + │ + ┌─────────────────▼──────────────────┐ + │ Cloudflare Worker │ + │ (Hono framework) │ + ├────────────────────────────────────┤ + │ Middleware: │ + │ cors → auth (默认放行) → ... │ + │ │ + │ Image serving: │ + │ cf.image { metadata:"none" } │ + │ (EXIF strip + resize in Worker) │ + │ │ + │ Upload flow: │ + │ presign → client PUT R2 → confirm │ + │ (Worker 不接收文件 body) │ + └──┬──────────┬────────┬──┬──────────┘ + │ │ │ │ + ┌─────────▼──┐ ┌────▼───┐ ┌──▼──▼──────────┐ + │ R2 Bucket │ │ D1 │ │ Service Bind │ + │ uploads │ │ SQLite │ │ → other Workers │ + │ (PRIVATE) │ │ │ │ │ + └────────────┘ └────────┘ └─────────────────┘ + + ┌─────────────────┐ + │ Cloudflare Pages │ + │ (React Frontend) │ + └─────────────────┘ +``` + +> **IMPORTANT**: R2 bucket MUST be private (no public access). All file access goes +> through the Worker, which applies `cf.image { metadata: 'none' }` for EXIF stripping. +> A public R2 origin would allow bypassing EXIF removal. + +### D1 Deployment Strategy + +| Config | Value | Reason | +|--------|-------|--------| +| Primary Location | `auto` (nearest to first write) or explicit `enam`/`apac` | 根据主要用户群选择 | +| Read Replication | Enabled | 列表查询可接受副本读 | +| Session API | `withSession("first-primary")` for writes | 写后读一致性 | +| Writes requiring consistency | Upload confirm, delete with ref count | 必须打到 primary | +| Reads tolerating staleness | List uploads, folder list, status | 可用 read replica | + +--- + +## 4. Upload Flow Redesign + +### 4.1 Design Principles + +1. **Worker 不接收文件 body** — 所有文件都直传 R2(presigned URL),Worker 仅做签名和元数据 +2. **Temp key → Promote** — 上传先写临时 key,服务端校验后 rename 为内容寻址 key +3. **可恢复** — Multipart 上传支持刷新页面后恢复(客户端持久化 session,服务端 listParts) +4. 
**服务端校验** — 上传完成后服务端计算完整文件 hash,不信任客户端 hash + +### 4.2 Upload Tiers + +| Tier | File Size | Method | Resumable | +|------|-----------|--------|-----------| +| Small | < 100MB | R2 presigned PUT (single request) | No (单次完成) | +| Large | >= 100MB | R2 S3 multipart (per-part presigned PUT) | Yes (part 级) | + +### 4.3 Small File Flow (< 100MB) + +``` +1. Client: compute file metadata for dedup hint +2. Client → POST /api/uploads/check { ext, size } + → Worker: query D1 for uploads where size = {size} AND filename LIKE '%.{ext}' + (按 size + ext 粗筛候选文件) + → If single match: return { exists: true, url, filename, uploadId } + (前端可展示 "已存在相同文件",用户确认后调用 confirm 复用) + → If multiple matches: return { exists: false } + (多个候选无法确定,走正常上传流程) + → If no match: return { exists: false } + → NOTE: dedup check is a HINT only. It reduces unnecessary uploads but never + skips server-side hash verification. The confirm step always computes full-file + MD5 to determine the final content-addressable key. + +3. Client → POST /api/uploads/presign { + originalname, mimetype, size, ext, folderId + } + → Worker: + a. Generate temp key: `tmp/{uuid}.{ext}` + b. Generate presigned PUT URL for R2 (using S3 API) + c. Save upload session to D1 (upload_sessions) + d. Return { presignedUrl, sessionId, tempKey } + +4. Client → PUT presignedUrl (直传 R2,绕过 Worker) + → R2 receives file body + +5. Client → POST /api/uploads/confirm { sessionId } + → Worker: + a. Read first 4KB of R2 temp key for magic-byte content sniffing + (e.g., JPEG starts with FF D8 FF, PNG with 89 50 4E 47) + If detected mimetype conflicts with client-claimed mimetype, reject upload. + This prevents disguised file attacks (e.g., .exe renamed to .jpg). + b. Read file from R2 temp key (streaming) + c. Compute full MD5 hash (streaming js-md5, O(1) memory) + d. Final key = `{serverMD5}.{ext}` + e. 
If SVG: read content, sanitize (sanitize-svg — Workers-compatible, no DOM dependency), + re-upload sanitized version to tempKey + NOTE: DOMPurify requires DOM API (document/window) which Workers do NOT have. + Use `@poppanator/shtml` or a regex-based SVG sanitizer that strips + + +
+ + + diff --git a/cloudflare/frontend/package.json b/cloudflare/frontend/package.json new file mode 100644 index 0000000..23217d0 --- /dev/null +++ b/cloudflare/frontend/package.json @@ -0,0 +1,67 @@ +{ + "name": "media-kit-frontend", + "private": true, + "version": "1.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "@emotion/react": "^11.14.0", + "@emotion/styled": "^11.14.0", + "@lottiefiles/react-lottie-player": "^3.5.4", + "@mui/icons-material": "^7.1.2", + "@mui/material": "^7.1.2", + "@uppy/core": "3.13.1", + "@uppy/dashboard": "3.9.1", + "@uppy/drag-drop": "3.1.1", + "@uppy/drop-target": "2.1.0", + "@uppy/file-input": "3.1.2", + "@uppy/golden-retriever": "3.2.0", + "@uppy/image-editor": "2.4.6", + "@uppy/locales": "3.5.4", + "@uppy/progress-bar": "3.1.1", + "@uppy/provider-views": "3.13.0", + "@uppy/react": "3.4.0", + "@uppy/status-bar": "3.3.3", + "@uppy/tus": "3.5.5", + "@uppy/unsplash": "3.3.1", + "@uppy/url": "3.6.1", + "@uppy/webcam": "3.4.2", + "ahooks": "^3.8.1", + "axios": "^1.7.0", + "copy-to-clipboard": "^3.3.3", + "dompurify": "^3.2.2", + "exifr": "^7.1.3", + "fflate": "^0.8.2", + "js-cookie": "^3.0.5", + "lodash": "^4.17.21", + "micromatch": "^4.0.8", + "mime-types": "^2.1.35", + "path-browserify": "^1.0.1", + "preact": "10.20.1", + "pretty-bytes": "^6.1.0", + "prop-types": "^15.8.1", + "react": "^19.0.0", + "react-dom": "^19.0.0", + "react-player": "^2.16.0", + "react-router-dom": "^6.28.0", + "spark-md5": "^3.0.2", + "timeago.js": "^4.0.2", + "ufo": "^1.6.1", + "url-join": "^4.0.1", + "wolfy87-eventemitter": "^5.2.9", + "xbytes": "^1.9.1" + }, + "devDependencies": { + "@types/js-cookie": "^3.0.6", + "@types/lodash": "^4.17.0", + "@types/react": "^19.0.0", + "@types/react-dom": "^19.0.0", + "@vitejs/plugin-react": "^4.3.0", + "typescript": "^5.5.0", + "vite": "^6.0.0" + } +} diff --git a/cloudflare/frontend/src/App.tsx b/cloudflare/frontend/src/App.tsx new 
file mode 100644 index 0000000..dbc2cac --- /dev/null +++ b/cloudflare/frontend/src/App.tsx @@ -0,0 +1,143 @@ +import { useState, useEffect, useCallback } from 'react'; +import AppBar from '@mui/material/AppBar'; +import Toolbar from '@mui/material/Toolbar'; +import Typography from '@mui/material/Typography'; +import Container from '@mui/material/Container'; +import Box from '@mui/material/Box'; +import CircularProgress from '@mui/material/CircularProgress'; +import Dialog from '@mui/material/Dialog'; +import DialogTitle from '@mui/material/DialogTitle'; +import DialogContent from '@mui/material/DialogContent'; +import DialogActions from '@mui/material/DialogActions'; +import Button from '@mui/material/Button'; +import api from './api'; +import UploadButton from './components/UploadButton'; +import ImageGrid from './components/ImageGrid'; +import FolderList from './components/FolderList'; + +interface Upload { + id: string; + filename: string; + originalname: string; + mimetype: string; + size: number; + created_at: string; +} + +interface Folder { + id: string; + name: string; +} + +export default function App() { + const [uploads, setUploads] = useState([]); + const [folders, setFolders] = useState([]); + const [selectedFolder, setSelectedFolder] = useState(''); + const [loading, setLoading] = useState(true); + const [page, setPage] = useState(1); + const [hasMore, setHasMore] = useState(true); + const [deleteTarget, setDeleteTarget] = useState(null); + + const pageSize = 20; + + const fetchUploads = useCallback( + async (p = 1, append = false) => { + setLoading(true); + try { + const params: any = { page: p, pageSize }; + if (selectedFolder) params.folderId = selectedFolder; + const { data } = await api.get('/api/uploads', { params }); + const list = data.data || []; + setUploads(append ? 
(prev) => [...prev, ...list] : list); + setHasMore(list.length >= pageSize); + setPage(p); + } finally { + setLoading(false); + } + }, + [selectedFolder] + ); + + const fetchFolders = async () => { + try { + const { data } = await api.get('/api/folders'); + setFolders(data.data || data || []); + } catch { + // folders endpoint may not exist yet + } + }; + + useEffect(() => { + fetchUploads(1); + }, [fetchUploads]); + + useEffect(() => { + fetchFolders(); + }, []); + + const handleUploadFinish = () => { + fetchUploads(1); + }; + + const handleDelete = async () => { + if (!deleteTarget) return; + await api.delete(`/api/uploads/${deleteTarget}`); + setDeleteTarget(null); + fetchUploads(1); + }; + + const loadMore = () => { + if (hasMore && !loading) { + fetchUploads(page + 1, true); + } + }; + + return ( + + + + + Media Kit + + + + + + + setSelectedFolder(id)} + onCreated={fetchFolders} + /> + + + + setDeleteTarget(id)} /> + + {loading && ( + + + + )} + + {!loading && hasMore && uploads.length > 0 && ( + + + + )} + + + setDeleteTarget(null)}> + Delete File + Are you sure you want to delete this file? 
+ + + + + + + );
+} diff --git a/cloudflare/frontend/src/api.ts b/cloudflare/frontend/src/api.ts new file mode 100644 index 0000000..fc58335 --- /dev/null +++ b/cloudflare/frontend/src/api.ts @@ -0,0 +1,14 @@ +import axios from 'axios'; + +const api = axios.create({ + timeout: 200000, + headers: { + 'x-user-did': 'did:abt:default-uploader', + }, +}); + +export default api; + +export function getImageUrl(filename: string): string { + return `/uploads/${filename}`; +} diff --git a/cloudflare/frontend/src/components/FolderList.tsx b/cloudflare/frontend/src/components/FolderList.tsx new file mode 100644 index 0000000..c7d1ac7 --- /dev/null +++ b/cloudflare/frontend/src/components/FolderList.tsx @@ -0,0 +1,89 @@ +import { useState } from 'react'; +import Box from '@mui/material/Box'; +import Chip from '@mui/material/Chip'; +import Button from '@mui/material/Button'; +import TextField from '@mui/material/TextField'; +import Dialog from '@mui/material/Dialog'; +import DialogTitle from '@mui/material/DialogTitle'; +import DialogContent from '@mui/material/DialogContent'; +import DialogActions from '@mui/material/DialogActions'; +import FolderIcon from '@mui/icons-material/Folder'; +import AddIcon from '@mui/icons-material/Add'; +import api from '../api'; + +interface Folder { + id: string; + name: string; +} + +interface Props { + folders: Folder[]; + selectedFolder: string; + onSelect: (id: string) => void; + onCreated: () => void; +} + +export default function FolderList({ folders, selectedFolder, onSelect, onCreated }: Props) { + const [open, setOpen] = useState(false); + const [name, setName] = useState(''); + const [creating, setCreating] = useState(false); + + const handleCreate = async () => { + if (!name.trim()) return; + setCreating(true); + try { + await api.post('/api/folders', { name: name.trim() }); + setName(''); + setOpen(false); + onCreated(); + } finally { + setCreating(false); + } + }; + + return ( + <> + + } + label="All" + variant={selectedFolder === '' 
? 'filled' : 'outlined'} + color={selectedFolder === '' ? 'primary' : 'default'} + onClick={() => onSelect('')} + /> + {folders.map((f) => ( + } + label={f.name} + variant={selectedFolder === f.id ? 'filled' : 'outlined'} + color={selectedFolder === f.id ? 'primary' : 'default'} + onClick={() => onSelect(f.id)} + /> + ))} + } label="New Folder" variant="outlined" onClick={() => setOpen(true)} /> + + + setOpen(false)} maxWidth="xs" fullWidth> + Create Folder + + setName(e.target.value)} + onKeyDown={(e) => e.key === 'Enter' && handleCreate()} + /> + + + + + + + + ); +} diff --git a/cloudflare/frontend/src/components/ImageGrid.tsx b/cloudflare/frontend/src/components/ImageGrid.tsx new file mode 100644 index 0000000..b154f15 --- /dev/null +++ b/cloudflare/frontend/src/components/ImageGrid.tsx @@ -0,0 +1,128 @@ +import { useState } from 'react'; +import Box from '@mui/material/Box'; +import Card from '@mui/material/Card'; +import CardMedia from '@mui/material/CardMedia'; +import CardActions from '@mui/material/CardActions'; +import IconButton from '@mui/material/IconButton'; +import Tooltip from '@mui/material/Tooltip'; +import Typography from '@mui/material/Typography'; +import ContentCopyIcon from '@mui/icons-material/ContentCopy'; +import DownloadIcon from '@mui/icons-material/Download'; +import DeleteIcon from '@mui/icons-material/Delete'; +import Snackbar from '@mui/material/Snackbar'; +import prettyBytes from 'pretty-bytes'; +import { format as timeago } from 'timeago.js'; +import { getImageUrl } from '../api'; + +interface Upload { + id: string; + filename: string; + originalname: string; + mimetype: string; + size: number; + created_at: string; + url?: string; +} + +interface Props { + uploads: Upload[]; + onDelete: (id: string) => void; +} + +function isImage(mimetype: string) { + return mimetype?.startsWith('image/'); +} + +function isVideo(mimetype: string) { + return mimetype?.startsWith('video/'); +} + +export default function ImageGrid({ uploads, onDelete 
}: Props) { + const [snackMsg, setSnackMsg] = useState(''); + + const copyUrl = (filename: string) => { + const url = `${window.location.origin}${getImageUrl(filename)}`; + navigator.clipboard.writeText(url); + setSnackMsg('URL copied'); + }; + + const download = (filename: string, originalname: string) => { + const a = document.createElement('a'); + a.href = getImageUrl(filename); + a.download = originalname; + a.click(); + }; + + if (uploads.length === 0) { + return ( + + No files uploaded yet + Click "Upload" to add files + + ); + } + + return ( + <> + + {uploads.map((item) => ( + + {isImage(item.mimetype) ? ( + + ) : isVideo(item.mimetype) ? ( + + + ) : ( + + + {item.mimetype || 'File'} + + + )} + + + + {item.originalname} + + + + {prettyBytes(item.size || 0)} · {timeago(item.created_at)} + + + + + copyUrl(item.filename)}> + + + + + download(item.filename, item.originalname)}> + + + + + onDelete(item.id)}> + + + + + + ))} + + setSnackMsg('')} + message={snackMsg} + anchorOrigin={{ vertical: 'bottom', horizontal: 'center' }} + /> + + ); +} diff --git a/cloudflare/frontend/src/components/UploadButton.tsx b/cloudflare/frontend/src/components/UploadButton.tsx new file mode 100644 index 0000000..8486d4f --- /dev/null +++ b/cloudflare/frontend/src/components/UploadButton.tsx @@ -0,0 +1,67 @@ +import { lazy, Suspense, useRef, useCallback } from 'react'; +import Button from '@mui/material/Button'; +import CircularProgress from '@mui/material/CircularProgress'; +import CloudUploadIcon from '@mui/icons-material/CloudUpload'; +import { createPortal } from 'react-dom'; + +const UploaderComponent = lazy(() => + import('@blocklet/uploader').then((res) => ({ + default: res.Uploader, + })) +); + +interface Props { + folderId?: string; + onUploadFinish?: (data: any) => void; +} + +export default function UploadButton({ folderId, onUploadFinish }: Props) { + const uploaderRef = useRef(null); + + const handleOpen = useCallback(() => { + uploaderRef.current?.open(); + }, []); + + 
const uploaderPortal = ( + + { + onUploadFinish?.(result.data); + }} + uploadedProps={{ + onSelectedFiles: (files: any[]) => { + if (files.length) { + onUploadFinish?.(files[0]); + } + }, + }} + coreProps={{ + restrictions: { + allowedFileExts: ['.jpeg', '.png', '.gif', '.svg', '.webp', '.bmp', '.ico', '.jpg'], + maxFileSize: 500 * 1024 * 1024, + }, + meta: { folderId: folderId || '' }, + }} + apiPathProps={{ + uploader: '/api/uploads', + companion: '/api/companion', + }} + installerProps={{ + disabled: true, + }} + locale="en" + /> + + ); + + return ( + <> + + {createPortal(uploaderPortal, document.body)} + + ); +} diff --git a/cloudflare/frontend/src/main.tsx b/cloudflare/frontend/src/main.tsx new file mode 100644 index 0000000..661ebcd --- /dev/null +++ b/cloudflare/frontend/src/main.tsx @@ -0,0 +1,7 @@ +// window.blocklet is already set in index.html '; + const result = sanitizeSvg(input); + expect(result).not.toContain(''); + }); + + it('removes on* event attributes', () => { + const input = ''; + const result = sanitizeSvg(input); + expect(result).not.toContain('onclick'); + expect(result).not.toContain('onload'); + }); + + it('removes javascript: URLs', () => { + const input = 'click'; + const result = sanitizeSvg(input); + expect(result).not.toContain('javascript:'); + }); + + it('removes foreignObject', () => { + const input = ''; + const result = sanitizeSvg(input); + expect(result).not.toContain('foreignObject'); + }); + + it('preserves safe SVG content', () => { + const input = ''; + const result = sanitizeSvg(input); + expect(result).toBe(input); + }); +}); diff --git a/cloudflare/src/__tests__/worker.integration.ts b/cloudflare/src/__tests__/worker.integration.ts new file mode 100644 index 0000000..96cd2bf --- /dev/null +++ b/cloudflare/src/__tests__/worker.integration.ts @@ -0,0 +1,90 @@ +import { describe, it, expect } from 'vitest'; +import { env, SELF } from 'cloudflare:test'; + +describe('Worker integration', () => { + it('responds to health 
check', async () => { + const res = await SELF.fetch('https://media-kit.test/health'); + expect(res.status).toBe(200); + const body = await res.json() as any; + expect(body.status).toBe('ok'); + expect(body.version).toBe('1.0.0'); + }); + + it('returns 404 for unknown routes', async () => { + const res = await SELF.fetch('https://media-kit.test/nonexistent'); + expect(res.status).toBe(404); + }); + + it('returns uploader status without auth', async () => { + const res = await SELF.fetch('https://media-kit.test/api/uploader/status'); + expect(res.status).toBe(200); + const body = await res.json() as any; + expect(body).toHaveProperty('availablePluginMap'); + expect(body).toHaveProperty('preferences'); + expect(body).toHaveProperty('restrictions'); + expect(body.availablePluginMap.Resources).toBe(false); + expect(body.restrictions).toHaveProperty('allowedFileTypes'); + expect(body.restrictions).toHaveProperty('maxFileSize'); + }); + + it('returns 404 for non-existent upload file', async () => { + const res = await SELF.fetch('https://media-kit.test/uploads/nonexistent.png'); + expect(res.status).toBe(404); + }); + + it('check endpoint returns exists:false for empty DB', async () => { + const res = await SELF.fetch('https://media-kit.test/api/uploads/check', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ size: 1024, ext: '.png' }), + }); + expect(res.status).toBe(200); + const body = await res.json() as any; + expect(body.exists).toBe(false); + }); + + it('presign endpoint returns sessionId and presignedUrl', async () => { + const res = await SELF.fetch('https://media-kit.test/api/uploads/presign', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + originalname: 'test.png', + mimetype: 'image/png', + size: 1024, + ext: '.png', + }), + }); + expect(res.status).toBe(200); + const body = await res.json() as any; + expect(body).toHaveProperty('sessionId'); + // presignedUrl may 
fail without real R2 credentials, but sessionId should exist + expect(typeof body.sessionId).toBe('string'); + }); + + it('list uploads returns paginated response', async () => { + const res = await SELF.fetch('https://media-kit.test/api/uploads'); + expect(res.status).toBe(200); + const body = await res.json() as any; + expect(body).toHaveProperty('uploads'); + expect(body).toHaveProperty('folders'); + expect(body).toHaveProperty('total'); + expect(body).toHaveProperty('page'); + expect(body).toHaveProperty('pageSize'); + }); + + it('auth middleware sets default DID', async () => { + const res = await SELF.fetch('https://media-kit.test/api/uploads'); + expect(res.status).toBe(200); + // If auth failed, we'd get 401/403 + }); + + it('admin routes reject non-admin users', async () => { + // Default DID is in ADMIN_DIDS, so it IS admin + // A different DID should be rejected + const res = await SELF.fetch('https://media-kit.test/api/uploads/some-id', { + method: 'DELETE', + headers: { 'x-user-did': 'did:abt:non-admin-user' }, + }); + expect(res.status).toBe(403); + }); +}); diff --git a/cloudflare/src/db/schema.ts b/cloudflare/src/db/schema.ts new file mode 100644 index 0000000..75d6b8a --- /dev/null +++ b/cloudflare/src/db/schema.ts @@ -0,0 +1,56 @@ +import { sqliteTable, text, integer, index, primaryKey } from 'drizzle-orm/sqlite-core'; + +export const uploads = sqliteTable('uploads', { + id: text('id').primaryKey(), + filename: text('filename').notNull(), + originalname: text('originalname'), + mimetype: text('mimetype'), + size: integer('size'), + remark: text('remark').default(''), + folderId: text('folder_id'), + createdAt: text('created_at'), + updatedAt: text('updated_at'), + createdBy: text('created_by'), + updatedBy: text('updated_by'), +}, (table) => ({ + filenameIdx: index('idx_uploads_filename').on(table.filename), + folderIdIdx: index('idx_uploads_folder_id').on(table.folderId), + mimetypeIdx: index('idx_uploads_mimetype').on(table.mimetype), + 
createdByIdx: index('idx_uploads_created_by').on(table.createdBy), + createdAtIdx: index('idx_uploads_created_at').on(table.createdAt), +})); + +export const uploadTags = sqliteTable('upload_tags', { + uploadId: text('upload_id').notNull().references(() => uploads.id, { onDelete: 'cascade' }), + tag: text('tag').notNull(), +}, (table) => ({ + pk: primaryKey({ columns: [table.uploadId, table.tag] }), + tagIdx: index('idx_upload_tags_tag').on(table.tag, table.uploadId), +})); + +export const folders = sqliteTable('folders', { + id: text('id').primaryKey(), + name: text('name').notNull(), + createdAt: text('created_at'), + updatedAt: text('updated_at'), + createdBy: text('created_by'), + updatedBy: text('updated_by'), +}, (table) => ({ + nameIdx: index('idx_folders_name').on(table.name), +})); + +export const uploadSessions = sqliteTable('upload_sessions', { + id: text('id').primaryKey(), + uploadId: text('upload_id'), + key: text('key').notNull(), + finalKey: text('final_key'), + totalSize: integer('total_size'), + partSize: integer('part_size'), + status: text('status').default('active'), + createdBy: text('created_by'), + createdAt: text('created_at'), + expiresAt: text('expires_at'), +}, (table) => ({ + statusIdx: index('idx_upload_sessions_status').on(table.status), + expiresIdx: index('idx_upload_sessions_expires').on(table.expiresAt), +})); diff --git a/cloudflare/src/middleware/auth.ts b/cloudflare/src/middleware/auth.ts new file mode 100644 index 0000000..e370f5d --- /dev/null +++ b/cloudflare/src/middleware/auth.ts @@ -0,0 +1,32 @@ +import { Context, Next } from 'hono'; +import type { HonoEnv } from '../types'; + +const DEFAULT_DID = 'did:abt:default-uploader'; + +/** + * Auth middleware. + * + * TODO: replace with shijun's CF auth SDK. + * When auth is ready, upload flow should go through Worker (not presigned URL direct upload) + * to ensure every request is authenticated. 
+ */ +export async function authMiddleware(c: Context, next: Next) { + const userId = c.req.header('x-user-did') || DEFAULT_DID; + const adminDids = (c.env.ADMIN_DIDS || DEFAULT_DID).split(',').map((s) => s.trim()); + const isAdmin = adminDids.includes(userId); + + c.set('user', { + id: userId, + role: isAdmin ? 'admin' : 'member', + }); + + return next(); +} + +export async function isAdminMiddleware(c: Context, next: Next) { + const user = c.get('user'); + if (user.role !== 'admin') { + return c.json({ error: 'Admin access required' }, 403); + } + return next(); +} diff --git a/cloudflare/src/routes/cleanup.ts b/cloudflare/src/routes/cleanup.ts new file mode 100644 index 0000000..a50806e --- /dev/null +++ b/cloudflare/src/routes/cleanup.ts @@ -0,0 +1,49 @@ +import { drizzle } from 'drizzle-orm/d1'; +import { eq, and, lt } from 'drizzle-orm'; +import type { Env } from '../types'; +import { uploadSessions } from '../db/schema'; +import { createS3Client, s3AbortMultipartUpload } from '../utils/s3'; + +/** + * Clean up expired upload sessions. + * + * Called from the scheduled cron handler (every hour). 
+ * - Queries D1 for active sessions past their expires_at time + * - For each: aborts the multipart upload via S3 API, updates status to 'aborted' + */ +export async function cleanupExpiredSessions(env: Env): Promise { + const db = drizzle(env.DB); + const now = new Date().toISOString(); + + // Find all active sessions that have expired + const expired = await db + .select() + .from(uploadSessions) + .where( + and( + eq(uploadSessions.status, 'active'), + lt(uploadSessions.expiresAt, now), + ), + ); + + if (expired.length === 0) return; + + const s3 = createS3Client(env); + + for (const session of expired) { + // Attempt to abort the multipart upload in R2 via S3 API + if (session.uploadId && session.key) { + try { + await s3AbortMultipartUpload(s3, env, session.key, session.uploadId); + } catch { + // Ignore errors — upload might already be cleaned up or completed + } + } + + // Mark session as aborted in D1 + await db + .update(uploadSessions) + .set({ status: 'aborted' }) + .where(eq(uploadSessions.id, session.id)); + } +} diff --git a/cloudflare/src/routes/folders.ts b/cloudflare/src/routes/folders.ts new file mode 100644 index 0000000..5f29fa3 --- /dev/null +++ b/cloudflare/src/routes/folders.ts @@ -0,0 +1,77 @@ +import { Hono } from 'hono'; +import { eq, desc } from 'drizzle-orm'; +import type { HonoEnv } from '../types'; +import { isAdminMiddleware } from '../middleware/auth'; +import { folders } from '../db/schema'; + +export const folderRoutes = new Hono(); + +/** + * POST /folders — Create a folder (admin only). + * If a folder with the same name already exists, return the existing one. 
+ */ +folderRoutes.post('/folders', isAdminMiddleware, async (c) => { + const db = c.get('db'); + const user = c.get('user'); + const body = await c.req.json<{ name: string }>(); + + if (!body.name) { + return c.json({ error: 'Folder name is required' }, 400); + } + + // Check if folder already exists + const [existing] = await db + .select() + .from(folders) + .where(eq(folders.name, body.name)) + .limit(1); + + if (existing) { + return c.json({ + _id: existing.id, + name: existing.name, + createdAt: existing.createdAt || '', + updatedAt: existing.updatedAt || '', + createdBy: existing.createdBy || '', + updatedBy: existing.updatedBy || '', + }); + } + + const id = crypto.randomUUID(); + const now = new Date().toISOString(); + + await db.insert(folders).values({ + id, + name: body.name, + createdAt: now, + updatedAt: now, + createdBy: user.id, + updatedBy: user.id, + }); + + return c.json({ + _id: id, + name: body.name, + createdAt: now, + updatedAt: now, + createdBy: user.id, + updatedBy: user.id, + }); +}); + +/** + * GET /folders — List all folders. + */ +folderRoutes.get('/folders', async (c) => { + const db = c.get('db'); + const rows = await db.select().from(folders).orderBy(desc(folders.createdAt)); + + return c.json(rows.map((row) => ({ + _id: row.id, + name: row.name, + createdAt: row.createdAt || '', + updatedAt: row.updatedAt || '', + createdBy: row.createdBy || '', + updatedBy: row.updatedBy || '', + }))); +}); diff --git a/cloudflare/src/routes/serve.ts b/cloudflare/src/routes/serve.ts new file mode 100644 index 0000000..27a4909 --- /dev/null +++ b/cloudflare/src/routes/serve.ts @@ -0,0 +1,68 @@ +import { Hono } from 'hono'; +import type { HonoEnv } from '../types'; + +export const fileServingRoutes = new Hono(); + +/** + * GET /uploads/:filename — Serve files from R2. + * + * Production: images go through cf.image for EXIF stripping + resize. + * Local dev: serve directly from R2 binding (no cf.image). 
+ */
+fileServingRoutes.get('/uploads/:filename', async (c) => { + const { filename } = c.req.param(); + const w = c.req.query('w'); + const h = c.req.query('h'); + const downloadName = c.req.query('filename'); + + const isProduction = c.env.ENVIRONMENT === 'production' && c.env.R2_ORIGIN_DOMAIN; + + const object = await c.env.R2_UPLOADS.head(filename); + if (!object) { + return c.text('404 NOT FOUND', 404); + } + + const contentType = object.httpMetadata?.contentType || 'application/octet-stream'; + const isImage = contentType.startsWith('image/'); + + // Production: use cf.image for EXIF stripping + auto format + resize + if (isImage && isProduction) { + const r2OriginUrl = `https://${c.env.R2_ORIGIN_DOMAIN}/${filename}`; + + const imageOptions: Record = { + metadata: 'none', + format: 'auto', + }; + + if (w) imageOptions.width = parseInt(w, 10); + if (h) imageOptions.height = parseInt(h, 10); + + if (w || h) { + imageOptions.fit = 'contain'; + imageOptions.quality = 85; + } else { + imageOptions.quality = 100; + } + + return fetch(r2OriginUrl, { + cf: { image: imageOptions }, + } as RequestInit); + } + + // Local dev / non-image: serve directly from R2 binding + const r2Object = await c.env.R2_UPLOADS.get(filename); + if (!r2Object) { + return c.text('404 NOT FOUND', 404); + } + + const headers: Record = { + 'Content-Type': contentType, + 'Cache-Control': 'public, max-age=31536000, immutable', + }; + + if (downloadName) { + headers['Content-Disposition'] = `attachment; filename="${encodeURIComponent(downloadName)}"`; + } + + return new Response(r2Object.body, { headers }); +}); diff --git a/cloudflare/src/routes/status.ts b/cloudflare/src/routes/status.ts new file mode 100644 index 0000000..7b98697 --- /dev/null +++ b/cloudflare/src/routes/status.ts @@ -0,0 +1,29 @@ +import { Hono } from 'hono'; +import type { HonoEnv } from '../types'; + +export const statusRoutes = new Hono(); + +/** + * GET /uploader/status — Return uploader config for the frontend. 
+ */ +statusRoutes.get('/uploader/status', async (c) => { + const env = c.env; + + const allowedFileTypes = env.ALLOWED_FILE_TYPES || '.jpeg,.png,.gif,.svg,.webp,.bmp,.ico'; + const maxUploadSize = env.MAX_UPLOAD_SIZE || '100MB'; + const isUnsplashEnabled = !!(env.UNSPLASH_KEY && env.UNSPLASH_SECRET); + const isAiImageEnabled = env.USE_AI_IMAGE !== 'false'; + + return c.json({ + uploadMode: 'presigned', + restrictions: { + allowedFileExts: allowedFileTypes, + maxFileSize: maxUploadSize, + }, + availablePluginMap: { + Uploaded: true, + ...(isUnsplashEnabled ? { Unsplash: true } : {}), + ...(isAiImageEnabled ? { AIImage: true } : {}), + }, + }); +}); diff --git a/cloudflare/src/routes/unsplash.ts b/cloudflare/src/routes/unsplash.ts new file mode 100644 index 0000000..8f7ce1c --- /dev/null +++ b/cloudflare/src/routes/unsplash.ts @@ -0,0 +1,115 @@ +import { Hono } from 'hono'; +import type { HonoEnv } from '../types'; +import { uploads } from '../db/schema'; + +export const unsplashRoutes = new Hono(); + +/** + * GET /unsplash/search — Search Unsplash photos. + * Proxies to Unsplash API with Client-ID auth. + * Returns structured results with attribution (ToS compliant). 
+ */ +unsplashRoutes.get('/unsplash/search', async (c) => { + const query = c.req.query('q'); + if (!query) { + return c.json({ error: 'query parameter "q" is required' }, 400); + } + + const page = c.req.query('page') || '1'; + const perPage = c.req.query('per_page') || '30'; + + const res = await fetch( + `https://api.unsplash.com/search/photos?query=${encodeURIComponent(query)}&page=${page}&per_page=${perPage}`, + { + headers: { + Authorization: `Client-ID ${c.env.UNSPLASH_KEY}`, + }, + }, + ); + + if (!res.ok) { + return c.json({ error: 'Unsplash API request failed' }, res.status as 400); + } + + const data = (await res.json()) as { + results: Array<{ + id: string; + urls: Record; + user: { name: string; username: string; links: { html: string } }; + links: { download_location: string }; + width: number; + height: number; + description: string | null; + alt_description: string | null; + }>; + total: number; + total_pages: number; + }; + + return c.json({ + results: data.results.map((photo) => ({ + id: photo.id, + urls: photo.urls, + attribution: { + name: photo.user.name, + username: photo.user.username, + link: photo.user.links.html, + }, + download_location: photo.links.download_location, + width: photo.width, + height: photo.height, + description: photo.description || photo.alt_description, + })), + total: data.total, + total_pages: data.total_pages, + }); +}); + +/** + * POST /unsplash/track-download — Track download + save reference in D1. + * Required by Unsplash API ToS: must trigger download tracking when user selects a photo. + * Saves a reference record in the uploads table with `unsplash:{photoId}` as filename + * (not an actual R2 key — images are hotlinked, not re-hosted). 
+ */ +unsplashRoutes.post('/unsplash/track-download', async (c) => { + const body = await c.req.json<{ + downloadLocation: string; + photoId: string; + attribution: { name: string; username: string; link: string }; + }>(); + + const { downloadLocation, photoId, attribution } = body; + + if (!downloadLocation || !photoId || !attribution) { + return c.json({ error: 'downloadLocation, photoId, and attribution are required' }, 400); + } + + // Required by Unsplash API: trigger download tracking + await fetch(downloadLocation, { + headers: { + Authorization: `Client-ID ${c.env.UNSPLASH_KEY}`, + }, + }); + + // Save reference in D1 (NOT the image file — hotlink only) + const db = c.get('db'); + const user = c.get('user'); + const now = new Date().toISOString(); + const id = crypto.randomUUID(); + + await db.insert(uploads).values({ + id, + filename: `unsplash:${photoId}`, // special prefix, not an R2 key + originalname: `${attribution.name} via Unsplash`, + mimetype: 'image/jpeg', + size: 0, // not stored locally + folderId: c.req.header('x-folder-id') || null, + remark: JSON.stringify({ unsplash: true, attribution, photoId }), + createdAt: now, + updatedAt: now, + createdBy: user.id, + updatedBy: user.id, + }); + + return c.json({ id, photoId }); +}); diff --git a/cloudflare/src/routes/upload.ts b/cloudflare/src/routes/upload.ts new file mode 100644 index 0000000..025bbce --- /dev/null +++ b/cloudflare/src/routes/upload.ts @@ -0,0 +1,725 @@ +import { Hono } from 'hono'; +import { eq, and, like, desc, sql, inArray } from 'drizzle-orm'; +import type { HonoEnv, CheckResponse, PresignResponse, ConfirmResponse } from '../types'; +import { uploads, uploadTags, uploadSessions, folders } from '../db/schema'; +import { + createS3Client, + generatePresignedPutUrl, + s3CreateMultipartUpload, + s3CompleteMultipartUpload, + s3AbortMultipartUpload, + s3ListParts, +} from '../utils/s3'; +import { streamMD5, detectMimeType, sanitizeSvg } from '../utils/hash'; +import { 
isAdminMiddleware } from '../middleware/auth'; + +const MULTIPART_THRESHOLD = 100 * 1024 * 1024; // 100MB +const DEFAULT_PART_SIZE = 10 * 1024 * 1024; // 10MB +const MIN_PART_SIZE = 5 * 1024 * 1024; // 5MB +const MAX_PARTS = 10000; +const SESSION_EXPIRY_HOURS = 24; +function parseSize(size: string): number { + const match = size.match(/^(\d+)\s*(MB|GB|KB)?$/i); + if (!match) return 500 * 1024 * 1024; + const num = parseInt(match[1], 10); + const unit = (match[2] || 'MB').toUpperCase(); + if (unit === 'GB') return num * 1024 * 1024 * 1024; + if (unit === 'KB') return num * 1024; + return num * 1024 * 1024; +} + +export const uploadRoutes = new Hono(); + +// POST /uploads/check — Dedup check by size+ext +uploadRoutes.post('/uploads/check', async (c) => { + const { size, ext } = await c.req.json<{ size: number; ext: string }>(); + if (!size || !ext) { + return c.json({ exists: false } satisfies CheckResponse); + } + + const db = c.get('db'); + const cleanExt = ext.replace(/^\./, ''); + const matches = await db + .select() + .from(uploads) + .where(and(eq(uploads.size, size), like(uploads.filename, `%.${cleanExt}`))); + + if (matches.length === 1) { + const match = matches[0]; + return c.json({ + exists: true, + url: `/uploads/${match.filename}`, + filename: match.filename, + uploadId: match.id, + } satisfies CheckResponse); + } + + return c.json({ exists: false } satisfies CheckResponse); +}); + +// POST /uploads/presign — Generate presigned URL or create multipart session +uploadRoutes.post('/uploads/presign', async (c) => { + const { originalname, mimetype, size, ext, folderId } = await c.req.json<{ + originalname: string; + mimetype?: string; + size: number; + ext: string; + folderId?: string; + }>(); + + const db = c.get('db'); + const user = c.get('user'); + const s3 = createS3Client(c.env); + const cleanExt = ext.replace(/^\./, ''); + const sessionId = crypto.randomUUID(); + const tempKey = `tmp/${crypto.randomUUID()}.${cleanExt}`; + const now = new 
Date().toISOString(); + const expiresAt = new Date(Date.now() + SESSION_EXPIRY_HOURS * 60 * 60 * 1000).toISOString(); + + const isMultipart = size >= MULTIPART_THRESHOLD; + + if (isMultipart) { + const uploadId = await s3CreateMultipartUpload(s3, c.env, tempKey, mimetype); + + let partSize = DEFAULT_PART_SIZE; + let partCount = Math.ceil(size / partSize); + if (partCount > MAX_PARTS) { + partSize = Math.ceil(size / MAX_PARTS); + if (partSize < MIN_PART_SIZE) partSize = MIN_PART_SIZE; + partCount = Math.ceil(size / partSize); + } + + await db.insert(uploadSessions).values({ + id: sessionId, + uploadId, + key: tempKey, + totalSize: size, + partSize, + status: 'active', + createdBy: user.id, + createdAt: now, + expiresAt, + }); + + return c.json({ + sessionId, + multipart: true, + uploadId, + key: tempKey, + partSize, + partCount, + } satisfies PresignResponse); + } + + await db.insert(uploadSessions).values({ + id: sessionId, + key: tempKey, + totalSize: size, + status: 'active', + createdBy: user.id, + createdAt: now, + expiresAt, + }); + + // In dev mode, return a proxy URL through the worker (avoids CORS with remote R2) + const isDev = c.env.ENVIRONMENT === 'development'; + let presignedUrl: string; + + if (isDev) { + presignedUrl = `/api/uploads/proxy-put/${sessionId}`; + } else { + presignedUrl = await generatePresignedPutUrl(s3, c.env, tempKey, { + contentType: mimetype, + }); + } + + return c.json({ + sessionId, + presignedUrl, + } satisfies PresignResponse); +}); + +// POST /uploads/confirm — Finalize upload +uploadRoutes.post('/uploads/confirm', async (c) => { + const body = await c.req.json<{ + sessionId?: string; + existingUploadId?: string; + originalname?: string; + mimetype?: string; + folderId?: string; + tags?: string; + }>(); + + const db = c.get('db'); + const user = c.get('user'); + + // Dedup shortcut: clone existing record for current user + if (body.existingUploadId) { + const [existing] = await db + .select() + .from(uploads) + 
.where(eq(uploads.id, body.existingUploadId)) + .limit(1); + + if (!existing) { + return c.json({ error: 'Upload not found' }, 404); + } + + const newId = crypto.randomUUID(); + const now = new Date().toISOString(); + + await db.insert(uploads).values({ + id: newId, + filename: existing.filename, + originalname: body.originalname || existing.originalname, + mimetype: existing.mimetype, + size: existing.size, + folderId: body.folderId || existing.folderId, + createdAt: now, + updatedAt: now, + createdBy: user.id, + updatedBy: user.id, + }); + + if (body.tags) { + const tagList = body.tags.split(',').map((t) => t.trim()).filter(Boolean); + if (tagList.length > 0) { + await db.insert(uploadTags).values(tagList.map((tag) => ({ uploadId: newId, tag }))); + } + } + + // Fetch tags for the new record + const tags = await db + .select({ tag: uploadTags.tag }) + .from(uploadTags) + .where(eq(uploadTags.uploadId, newId)); + + return c.json({ + _id: newId, + filename: existing.filename, + originalname: body.originalname || existing.originalname || '', + mimetype: existing.mimetype || '', + size: existing.size || 0, + url: `/uploads/${existing.filename}`, + createdAt: now, + createdBy: user.id, + tags: tags.map((t) => t.tag), + } satisfies ConfirmResponse); + } + + // Normal confirm flow + if (!body.sessionId) { + return c.json({ error: 'sessionId or existingUploadId required' }, 400); + } + + const [session] = await db + .select() + .from(uploadSessions) + .where(eq(uploadSessions.id, body.sessionId)) + .limit(1); + + if (!session) { + return c.json({ error: 'Session not found' }, 404); + } + + if (session.status !== 'active') { + return c.json({ error: 'Session is not active' }, 400); + } + + // Reject files exceeding MAX_UPLOAD_SIZE (would need Queue for async hash processing) + const maxSize = parseSize(c.env.MAX_UPLOAD_SIZE || '500MB'); + if (session.totalSize && session.totalSize > maxSize) { + return c.json({ error: `File size exceeds limit (${c.env.MAX_UPLOAD_SIZE || 
'500MB'})` }, 400); + } + + // Range-read first 4KB for MIME detection (separate from full body) + const rangeObj = await c.env.R2_UPLOADS.get(session.key, { range: { offset: 0, length: 4096 } }); + if (!rangeObj) { + return c.json({ error: 'Temp file not found in R2' }, 404); + } + const headerBytes = new Uint8Array(await rangeObj.arrayBuffer()); + const detectedMime = detectMimeType(headerBytes); + + if (body.mimetype && detectedMime && !areMimeTypesCompatible(detectedMime, body.mimetype)) { + await c.env.R2_UPLOADS.delete(session.key); + await db.update(uploadSessions).set({ status: 'aborted' }).where(eq(uploadSessions.id, body.sessionId)); + return c.json({ error: 'File content does not match claimed MIME type' }, 400); + } + + const finalMime = detectedMime || body.mimetype || 'application/octet-stream'; + + // SVG sanitization — needs full object read + if (finalMime === 'image/svg+xml') { + const svgObj = await c.env.R2_UPLOADS.get(session.key); + if (svgObj) { + const svgText = await svgObj.text(); + const sanitized = sanitizeSvg(svgText); + if (sanitized !== svgText) { + await c.env.R2_UPLOADS.put(session.key, sanitized); + } + } + } + + // Streaming MD5 hash — always get a fresh stream + const hashObj = await c.env.R2_UPLOADS.get(session.key); + if (!hashObj) { + return c.json({ error: 'Failed to read file for hashing' }, 500); + } + + const md5Hash = await streamMD5(hashObj.body); + const fileSize = session.totalSize || hashObj.size; + const ext = session.key.split('.').pop() || ''; + const finalKey = `${md5Hash}.${ext}`; + + // Check if final key already exists (content dedup by MD5) + const existingObject = await c.env.R2_UPLOADS.head(finalKey); + if (existingObject) { + // MD5-based key match means content is identical, just delete temp + await c.env.R2_UPLOADS.delete(session.key); + } else { + // Use R2 binding for copy (works in local dev with miniflare) + const srcObj = await c.env.R2_UPLOADS.get(session.key); + if (srcObj) { + await 
c.env.R2_UPLOADS.put(finalKey, srcObj.body, { + httpMetadata: srcObj.httpMetadata, + }); + } + await c.env.R2_UPLOADS.delete(session.key); + } + + // Insert D1 record + const newId = crypto.randomUUID(); + const now = new Date().toISOString(); + + await db.insert(uploads).values({ + id: newId, + filename: finalKey, + originalname: body.originalname || finalKey, + mimetype: finalMime, + size: fileSize, + folderId: body.folderId, + createdAt: now, + updatedAt: now, + createdBy: user.id, + updatedBy: user.id, + }); + + if (body.tags) { + const tagList = body.tags.split(',').map((t) => t.trim()).filter(Boolean); + if (tagList.length > 0) { + await db.insert(uploadTags).values(tagList.map((tag) => ({ uploadId: newId, tag }))); + } + } + + await db + .update(uploadSessions) + .set({ status: 'completed', finalKey }) + .where(eq(uploadSessions.id, body.sessionId)); + + // Fetch tags for response + const responseTags = body.tags + ? body.tags.split(',').map((t) => t.trim()).filter(Boolean) + : []; + + return c.json({ + _id: newId, + filename: finalKey, + originalname: body.originalname || finalKey, + mimetype: finalMime, + size: fileSize, + url: `/uploads/${finalKey}`, + createdAt: now, + createdBy: user.id, + tags: responseTags, + } satisfies ConfirmResponse); +}); + +// POST /uploads/multipart/part-url — Get presigned URL for a single part +uploadRoutes.post('/uploads/multipart/part-url', async (c) => { + const { sessionId, partNumber } = await c.req.json<{ sessionId: string; partNumber: number }>(); + const db = c.get('db'); + + const [session] = await db + .select() + .from(uploadSessions) + .where(eq(uploadSessions.id, sessionId)) + .limit(1); + + if (!session || session.status !== 'active') { + return c.json({ error: 'Session not found or not active' }, 400); + } + + if (!session.uploadId) { + return c.json({ error: 'Session is not a multipart upload' }, 400); + } + + const s3 = createS3Client(c.env); + const presignedUrl = await generatePresignedPutUrl(s3, c.env, 
session.key, { + partNumber, + uploadId: session.uploadId, + }); + + return c.json({ presignedUrl, partNumber }); +}); + +// POST /uploads/multipart/complete — Complete multipart upload +uploadRoutes.post('/uploads/multipart/complete', async (c) => { + const { sessionId, parts } = await c.req.json<{ + sessionId: string; + parts: Array<{ partNumber: number; etag: string }>; + }>(); + const db = c.get('db'); + + const [session] = await db + .select() + .from(uploadSessions) + .where(eq(uploadSessions.id, sessionId)) + .limit(1); + + if (!session || session.status !== 'active') { + return c.json({ error: 'Session not found or not active' }, 400); + } + + if (!session.uploadId) { + return c.json({ error: 'Session is not a multipart upload' }, 400); + } + + const s3 = createS3Client(c.env); + await s3CompleteMultipartUpload(s3, c.env, session.key, session.uploadId, parts); + + return c.json({ status: 'assembled' }); +}); + +// POST /uploads/multipart/abort — Abort multipart upload +uploadRoutes.post('/uploads/multipart/abort', async (c) => { + const { sessionId } = await c.req.json<{ sessionId: string }>(); + const db = c.get('db'); + + const [session] = await db + .select() + .from(uploadSessions) + .where(eq(uploadSessions.id, sessionId)) + .limit(1); + + if (!session) { + return c.json({ error: 'Session not found' }, 404); + } + + if (session.uploadId) { + const s3 = createS3Client(c.env); + await s3AbortMultipartUpload(s3, c.env, session.key, session.uploadId); + } + + await db + .update(uploadSessions) + .set({ status: 'aborted' }) + .where(eq(uploadSessions.id, sessionId)); + + return c.json({ status: 'aborted' }); +}); + +// GET /uploads/multipart/status — Query completed parts +uploadRoutes.get('/uploads/multipart/status', async (c) => { + const sessionId = c.req.query('sessionId'); + if (!sessionId) { + return c.json({ error: 'sessionId required' }, 400); + } + + const db = c.get('db'); + const [session] = await db + .select() + .from(uploadSessions) + 
.where(eq(uploadSessions.id, sessionId)) + .limit(1); + + if (!session) { + return c.json({ error: 'Session not found' }, 404); + } + + if (!session.uploadId) { + return c.json({ completedParts: [], status: session.status }); + } + + const s3 = createS3Client(c.env); + const completedParts = await s3ListParts(s3, c.env, session.key, session.uploadId); + + return c.json({ completedParts, status: session.status }); +}); + +// PUT /uploads/proxy-put/:sessionId — Dev-mode proxy: receive file body and put to local R2 +uploadRoutes.put('/uploads/proxy-put/:sessionId', async (c) => { + const db = c.get('db'); + const sessionId = c.req.param('sessionId'); + + const [session] = await db + .select() + .from(uploadSessions) + .where(eq(uploadSessions.id, sessionId)) + .limit(1); + + if (!session || session.status !== 'active') { + return c.json({ error: 'Session not found or not active' }, 400); + } + + const body = await c.req.arrayBuffer(); + const contentType = c.req.header('content-type') || 'application/octet-stream'; + + await c.env.R2_UPLOADS.put(session.key, body, { + httpMetadata: { contentType }, + }); + + return new Response(null, { status: 200 }); +}); + +// POST /uploads/direct — Direct upload through Worker (for local dev or small files) +uploadRoutes.post('/uploads/direct', async (c) => { + const db = c.get('db'); + const user = c.get('user'); + + const formData = await c.req.formData(); + const file = formData.get('file') as File | null; + const folderId = (formData.get('folderId') as string) || ''; + const tags = (formData.get('tags') as string) || ''; + + if (!file) { + return c.json({ error: 'No file provided' }, 400); + } + + const ext = file.name.split('.').pop() || 'bin'; + const tempKey = `tmp/${crypto.randomUUID()}.${ext}`; + + // Upload to R2 binding directly (works in local dev) + await c.env.R2_UPLOADS.put(tempKey, file.stream(), { + httpMetadata: { contentType: file.type }, + }); + + // Read back for MD5 hash + const obj = await 
c.env.R2_UPLOADS.get(tempKey); + if (!obj) { + return c.json({ error: 'Failed to read uploaded file' }, 500); + } + + const md5Hash = await streamMD5(obj.body); + const finalKey = `${md5Hash}.${ext}`; + + // Content dedup — use R2 binding directly (no S3 client needed) + const existingObj = await c.env.R2_UPLOADS.head(finalKey); + if (existingObj) { + await c.env.R2_UPLOADS.delete(tempKey); + } else { + // Copy via R2 binding: get + put + delete temp + const srcObj = await c.env.R2_UPLOADS.get(tempKey); + if (srcObj) { + await c.env.R2_UPLOADS.put(finalKey, srcObj.body, { + httpMetadata: srcObj.httpMetadata, + }); + } + await c.env.R2_UPLOADS.delete(tempKey); + } + + const newId = crypto.randomUUID(); + const now = new Date().toISOString(); + + await db.insert(uploads).values({ + id: newId, + filename: finalKey, + originalname: file.name, + mimetype: file.type || 'application/octet-stream', + size: file.size, + folderId: folderId || null, + createdAt: now, + updatedAt: now, + createdBy: user.id, + updatedBy: user.id, + }); + + if (tags) { + const tagList = tags.split(',').map((t) => t.trim()).filter(Boolean); + if (tagList.length > 0) { + await db.insert(uploadTags).values(tagList.map((tag) => ({ uploadId: newId, tag }))); + } + } + + return c.json({ + _id: newId, + filename: finalKey, + originalname: file.name, + mimetype: file.type || 'application/octet-stream', + size: file.size, + url: `/uploads/${finalKey}`, + createdAt: now, + createdBy: user.id, + tags: tags ? 
tags.split(',').map((t) => t.trim()).filter(Boolean) : [],
  });
});

// GET /uploads — List uploads with pagination (inline Drizzle query)
uploadRoutes.get('/uploads', async (c) => {
  const db = c.get('db');
  const user = c.get('user');

  const page = parseInt(c.req.query('page') || '1', 10);
  const pageSize = parseInt(c.req.query('pageSize') || '20', 10);
  const folderId = c.req.query('folderId');
  const tag = c.req.query('tag') || c.req.query('tags');
  const createdBy = c.req.query('createdBy');

  // Fix: the element type was mangled to bare `ReturnType[]` (invalid TS) —
  // restore the drizzle condition type produced by eq()/inArray().
  const conditions: ReturnType<typeof eq>[] = [];

  // Admin can see all uploads; members only see their own
  if (user.role === 'admin' && createdBy) {
    conditions.push(eq(uploads.createdBy, createdBy));
  } else if (user.role !== 'admin') {
    conditions.push(eq(uploads.createdBy, user.id));
  }

  if (folderId) {
    conditions.push(eq(uploads.folderId, folderId));
  }

  if (tag) {
    // Subquery: ids of uploads carrying the requested tag.
    const taggedIds = db
      .select({ uploadId: uploadTags.uploadId })
      .from(uploadTags)
      .where(eq(uploadTags.tag, tag));
    conditions.push(inArray(uploads.id, taggedIds));
  }

  const whereClause = conditions.length > 0 ? and(...conditions) : undefined;

  // Page of rows and total count in parallel.
  const [data, countResult] = await Promise.all([
    db
      .select()
      .from(uploads)
      .where(whereClause)
      .orderBy(desc(uploads.createdAt), desc(uploads.updatedAt))
      .limit(pageSize)
      .offset((page - 1) * pageSize),
    db
      .select({ count: sql<number>`count(*)` })
      .from(uploads)
      .where(whereClause),
  ]);

  // Fetch tags for all uploads in the result
  const uploadIds = data.map((u) => u.id);
  const allTags = uploadIds.length > 0
    ? await db.select().from(uploadTags).where(inArray(uploadTags.uploadId, uploadIds))
    : [];
  // Fix: restore the mangled Map element types.
  const tagsByUpload = new Map<string, string[]>();
  allTags.forEach((t) => {
    const list = tagsByUpload.get(t.uploadId) || [];
    list.push(t.tag);
    tagsByUpload.set(t.uploadId, list);
  });

  const rows = data.map((row) => ({
    _id: row.id,
    filename: row.filename,
    originalname: row.originalname || '',
    mimetype: row.mimetype || '',
    size: row.size || 0,
    remark: row.remark || '',
    folderId: row.folderId,
    tags: tagsByUpload.get(row.id) || [],
    url: `/uploads/${row.filename}`,
    createdAt: row.createdAt || '',
    updatedAt: row.updatedAt || '',
    createdBy: row.createdBy || '',
    updatedBy: row.updatedBy || '',
  }));

  // Fetch folders for the response (matches original blocklet API format).
  // Fix: dropped the `(f: any)` cast — the row type is inferred from the schema.
  const allFolders = await db.select().from(folders).orderBy(desc(folders.createdAt));
  const folderRows = allFolders.map((f) => ({
    _id: f.id,
    name: f.name,
    createdAt: f.createdAt || '',
    updatedAt: f.updatedAt || '',
    createdBy: f.createdBy || '',
    updatedBy: f.updatedBy || '',
  }));

  const total = countResult[0]?.count ?? 0;

  return c.json({
    uploads: rows,
    folders: folderRows,
    total,
    page,
    pageSize,
    pageCount: Math.ceil(total / pageSize),
  });
});

// DELETE /uploads/:id — Delete upload (admin only)
uploadRoutes.delete('/uploads/:id', isAdminMiddleware, async (c) => {
  const db = c.get('db');
  const id = c.req.param('id');

  const [record] = await db.select().from(uploads).where(eq(uploads.id, id)).limit(1);
  if (!record) {
    return c.json({ error: 'Upload not found' }, 404);
  }

  // Check if any other records reference the same filename (MD5-keyed files
  // can be shared by several upload records).
  const [countResult] = await db
    .select({ count: sql<number>`count(*)` })
    .from(uploads)
    .where(eq(uploads.filename, record.filename));

  // Only delete the actual file if this is the last reference
  if ((countResult?.count ??
0) <= 1) { + await c.env.R2_UPLOADS.delete(record.filename); + } + + // Delete tags and record + await db.delete(uploadTags).where(eq(uploadTags.uploadId, id)); + await db.delete(uploads).where(eq(uploads.id, id)); + + return c.json({ success: true }); +}); + +// PUT /uploads/:id — Move to folder (admin only) +uploadRoutes.put('/uploads/:id', isAdminMiddleware, async (c) => { + const db = c.get('db'); + const id = c.req.param('id'); + const body = await c.req.json<{ folderId: string }>(); + + const [record] = await db.select().from(uploads).where(eq(uploads.id, id)).limit(1); + if (!record) { + return c.json({ error: 'Upload not found' }, 404); + } + + const now = new Date().toISOString(); + await db + .update(uploads) + .set({ folderId: body.folderId, updatedAt: now }) + .where(eq(uploads.id, id)); + + const [updated] = await db.select().from(uploads).where(eq(uploads.id, id)).limit(1); + + return c.json({ + _id: updated.id, + filename: updated.filename, + originalname: updated.originalname || '', + mimetype: updated.mimetype || '', + size: updated.size || 0, + folderId: updated.folderId, + url: `/uploads/${updated.filename}`, + createdAt: updated.createdAt || '', + updatedAt: updated.updatedAt || '', + createdBy: updated.createdBy || '', + updatedBy: updated.updatedBy || '', + }); +}); + +function areMimeTypesCompatible(detected: string, claimed: string): boolean { + if (detected === claimed) return true; + const detectedBase = detected.split('/')[0]; + const claimedBase = claimed.split('/')[0]; + if (detectedBase === claimedBase) return true; + // application/octet-stream is a generic fallback, always compatible + if (claimed === 'application/octet-stream') return true; + return false; +} diff --git a/cloudflare/src/types.ts b/cloudflare/src/types.ts new file mode 100644 index 0000000..8af69fd --- /dev/null +++ b/cloudflare/src/types.ts @@ -0,0 +1,84 @@ +import type { DrizzleD1Database } from 'drizzle-orm/d1'; + +export interface Env { + // D1 Database + DB: 
D1Database; + // R2 Bucket + R2_UPLOADS: R2Bucket; + // R2 S3 credentials (for presigned URLs) + R2_ACCESS_KEY_ID: string; + R2_SECRET_ACCESS_KEY: string; + CF_ACCOUNT_ID: string; + // R2 origin domain for cf.image (protected by IP Access Rule) + R2_ORIGIN_DOMAIN: string; + // Environment config + ENVIRONMENT: string; + MAX_UPLOAD_SIZE: string; + ALLOWED_FILE_TYPES: string; + USE_AI_IMAGE: string; + ADMIN_DIDS: string; + // Unsplash + UNSPLASH_KEY: string; + UNSPLASH_SECRET: string; + // AIGNE Hub + AIGNE_HUB_URL: string; + AIGNE_HUB_API_KEY: string; + // Upload Queue (for large file async confirm) + CONFIRM_QUEUE: Queue; +} + +export interface ConfirmQueueMessage { + sessionId: string; + userId: string; +} + +export interface UserContext { + id: string; + role: 'admin' | 'member'; +} + +// Hono env bindings +export type HonoEnv = { + Bindings: Env; + Variables: { + user: UserContext; + db: DrizzleD1Database; + }; +}; + +// API response types +export interface CheckResponse { + exists: boolean; + url?: string; + filename?: string; + uploadId?: string; +} + +export interface PresignResponse { + sessionId: string; + presignedUrl?: string; + multipart?: boolean; + uploadId?: string; + key?: string; + partSize?: number; + partCount?: number; +} + +export interface ConfirmResponse { + _id: string; + filename: string; + originalname: string; + mimetype: string; + size: number; + url: string; + createdAt: string; + createdBy: string; + tags?: string[]; +} + +export interface ListResponse { + data: ConfirmResponse[]; + total: number; + page: number; + pageSize: number; +} diff --git a/cloudflare/src/utils/hash.ts b/cloudflare/src/utils/hash.ts new file mode 100644 index 0000000..2eafb87 --- /dev/null +++ b/cloudflare/src/utils/hash.ts @@ -0,0 +1,128 @@ +// @ts-ignore - js-md5 types don't match the actual API +import md5 from 'js-md5'; + +/** + * Compute MD5 hash of an R2 object using streaming (O(1) memory). + * Workers WebCrypto does NOT support MD5 — must use js-md5. 
+ */ +export async function streamMD5(body: ReadableStream): Promise { + const hasher = (md5 as any).create(); + const reader = body.getReader(); + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + hasher.update(value); + } + + return hasher.hex(); +} + +/** + * Detect MIME type from file magic bytes (first 4KB). + * Returns detected MIME type or null if unknown. + */ +export function detectMimeType(bytes: Uint8Array): string | null { + if (bytes.length < 4) return null; + + // JPEG: FF D8 FF + if (bytes[0] === 0xff && bytes[1] === 0xd8 && bytes[2] === 0xff) { + return 'image/jpeg'; + } + + // PNG: 89 50 4E 47 0D 0A 1A 0A + if ( + bytes[0] === 0x89 && + bytes[1] === 0x50 && + bytes[2] === 0x4e && + bytes[3] === 0x47 + ) { + return 'image/png'; + } + + // GIF: 47 49 46 38 + if ( + bytes[0] === 0x47 && + bytes[1] === 0x49 && + bytes[2] === 0x46 && + bytes[3] === 0x38 + ) { + return 'image/gif'; + } + + // WebP: 52 49 46 46 ... 57 45 42 50 + if ( + bytes[0] === 0x52 && + bytes[1] === 0x49 && + bytes[2] === 0x46 && + bytes[3] === 0x46 && + bytes.length >= 12 && + bytes[8] === 0x57 && + bytes[9] === 0x45 && + bytes[10] === 0x42 && + bytes[11] === 0x50 + ) { + return 'image/webp'; + } + + // BMP: 42 4D + if (bytes[0] === 0x42 && bytes[1] === 0x4d) { + return 'image/bmp'; + } + + // ICO: 00 00 01 00 + if ( + bytes[0] === 0x00 && + bytes[1] === 0x00 && + bytes[2] === 0x01 && + bytes[3] === 0x00 + ) { + return 'image/x-icon'; + } + + // PDF: 25 50 44 46 (%PDF) + if ( + bytes[0] === 0x25 && + bytes[1] === 0x50 && + bytes[2] === 0x44 && + bytes[3] === 0x46 + ) { + return 'application/pdf'; + } + + // SVG: check for /gi, ''); + + // Remove on* event attributes + sanitized = sanitized.replace(/\s+on\w+\s*=\s*("[^"]*"|'[^']*'|[^\s>]+)/gi, ''); + + // Remove javascript: URLs + sanitized = sanitized.replace(/href\s*=\s*["']javascript:[^"']*["']/gi, 'href=""'); + sanitized = sanitized.replace(/xlink:href\s*=\s*["']javascript:[^"']*["']/gi, 
'xlink:href=""'); + + // Remove data: URLs (potential XSS vector) + sanitized = sanitized.replace(/href\s*=\s*["']data:[^"']*["']/gi, 'href=""'); + + // Remove foreignObject (can embed arbitrary HTML) + sanitized = sanitized.replace(//gi, ''); + + // Remove use elements pointing to external resources + sanitized = sanitized.replace(/]*href\s*=\s*["']https?:[^"']*["'][^>]*\/>/gi, ''); + + return sanitized; +} diff --git a/cloudflare/src/utils/s3.ts b/cloudflare/src/utils/s3.ts new file mode 100644 index 0000000..88bf709 --- /dev/null +++ b/cloudflare/src/utils/s3.ts @@ -0,0 +1,173 @@ +import { AwsClient } from 'aws4fetch'; +import { XMLParser } from 'fast-xml-parser'; +import type { Env } from '../types'; + +export function createS3Client(env: Env) { + return new AwsClient({ + accessKeyId: env.R2_ACCESS_KEY_ID, + secretAccessKey: env.R2_SECRET_ACCESS_KEY, + region: 'auto', + service: 's3', + }); +} + +function s3Endpoint(env: Env, key: string) { + return `https://${env.CF_ACCOUNT_ID}.r2.cloudflarestorage.com/media-kit-uploads/${key}`; +} + +export async function generatePresignedPutUrl( + s3: AwsClient, + env: Env, + key: string, + options: { + expiresIn?: number; + contentType?: string; + partNumber?: number; + uploadId?: string; + } = {}, +) { + const { expiresIn = 3600, contentType, partNumber, uploadId } = options; + const url = new URL(s3Endpoint(env, key)); + + if (partNumber && uploadId) { + url.searchParams.set('partNumber', String(partNumber)); + url.searchParams.set('uploadId', uploadId); + } + url.searchParams.set('X-Amz-Expires', String(expiresIn)); + + const headers: Record = {}; + if (contentType) headers['Content-Type'] = contentType; + + const signed = await s3.sign( + new Request(url.toString(), { method: 'PUT', headers }), + { aws: { signQuery: true } }, + ); + + return signed.url; +} + +export async function s3CopyObject( + s3: AwsClient, + env: Env, + sourceKey: string, + destKey: string, +) { + const endpoint = s3Endpoint(env, destKey); + const 
signed = await s3.sign( + new Request(endpoint, { + method: 'PUT', + headers: { + 'x-amz-copy-source': `/media-kit-uploads/${sourceKey}`, + }, + }), + ); + const res = await fetch(signed); + if (!res.ok) { + const body = await res.text(); + throw new Error(`CopyObject failed: ${res.status} ${body}`); + } +} + +export async function s3CreateMultipartUpload( + s3: AwsClient, + env: Env, + key: string, + contentType?: string, +): Promise { + const url = new URL(s3Endpoint(env, key)); + url.searchParams.set('uploads', ''); + + const headers: Record = {}; + if (contentType) headers['Content-Type'] = contentType; + + const signed = await s3.sign( + new Request(url.toString(), { method: 'POST', headers }), + ); + const res = await fetch(signed); + if (!res.ok) throw new Error(`CreateMultipartUpload failed: ${res.status}`); + + const xml = await res.text(); + const parser = new XMLParser(); + const parsed = parser.parse(xml); + return parsed.InitiateMultipartUploadResult.UploadId; +} + +export async function s3CompleteMultipartUpload( + s3: AwsClient, + env: Env, + key: string, + uploadId: string, + parts: Array<{ partNumber: number; etag: string }>, +) { + const url = new URL(s3Endpoint(env, key)); + url.searchParams.set('uploadId', uploadId); + + const partsXml = parts + .sort((a, b) => a.partNumber - b.partNumber) + .map( + (p) => + `${p.partNumber}${p.etag}`, + ) + .join(''); + const body = `${partsXml}`; + + const signed = await s3.sign( + new Request(url.toString(), { + method: 'POST', + headers: { 'Content-Type': 'application/xml' }, + body, + }), + ); + const res = await fetch(signed); + if (!res.ok) { + const respBody = await res.text(); + throw new Error(`CompleteMultipartUpload failed: ${res.status} ${respBody}`); + } +} + +export async function s3AbortMultipartUpload( + s3: AwsClient, + env: Env, + key: string, + uploadId: string, +) { + const url = new URL(s3Endpoint(env, key)); + url.searchParams.set('uploadId', uploadId); + + const signed = await s3.sign( + 
new Request(url.toString(), { method: 'DELETE' }), + ); + const res = await fetch(signed); + if (!res.ok) { + // Ignore errors — might already be cleaned up + } +} + +export async function s3ListParts( + s3: AwsClient, + env: Env, + key: string, + uploadId: string, +): Promise> { + const url = new URL(s3Endpoint(env, key)); + url.searchParams.set('uploadId', uploadId); + + const signed = await s3.sign( + new Request(url.toString(), { method: 'GET' }), + ); + const res = await fetch(signed); + if (!res.ok) return []; + + const xml = await res.text(); + const parser = new XMLParser(); + const parsed = parser.parse(xml); + const rawParts = parsed?.ListPartsResult?.Part; + if (!rawParts) return []; + + const partArray = Array.isArray(rawParts) ? rawParts : [rawParts]; + return partArray.map((p: any) => ({ + partNumber: Number(p.PartNumber), + etag: String(p.ETag), + size: Number(p.Size), + })); +} diff --git a/cloudflare/src/worker.ts b/cloudflare/src/worker.ts new file mode 100644 index 0000000..32034ec --- /dev/null +++ b/cloudflare/src/worker.ts @@ -0,0 +1,118 @@ +import { Hono } from 'hono'; +import { cors } from 'hono/cors'; +import { drizzle } from 'drizzle-orm/d1'; +import type { HonoEnv, Env } from './types'; +import { authMiddleware } from './middleware/auth'; +import { uploadRoutes } from './routes/upload'; +import { fileServingRoutes } from './routes/serve'; +import { folderRoutes } from './routes/folders'; +import { unsplashRoutes } from './routes/unsplash'; +import { statusRoutes } from './routes/status'; +import { cleanupExpiredSessions } from './routes/cleanup'; + +const app = new Hono(); + +// Global middleware +// TODO: restrict CORS origin to actual deployment domain when auth is implemented +app.use('*', cors()); +app.use('*', async (c, next) => { + const db = drizzle(c.env.DB); + c.set('db', db); + return next(); +}); + +// File serving (no auth required for public files, EXIF stripped) +app.route('/', fileServingRoutes); + +// Status endpoint (no 
auth) +app.route('/api', statusRoutes); + +// Auth-protected routes +// TODO: replace x-user-did header auth with shijun's CF auth SDK when ready +app.use('/api/*', authMiddleware); +app.route('/api', uploadRoutes); +app.route('/api', folderRoutes); +app.route('/api', unsplashRoutes); + +// AI Image — proxy to AIGNE Hub (same flow as original blocklet version) +const AIGNE_HUB_DID = 'z8ia3xzq2tMq8CRHfaXj1BTYJyYnEcHbqP8cJ'; + +let aigneHubMountPoint: string | null = null; + +async function getAigneHubMountPoint(env: Env): Promise { + if (aigneHubMountPoint) return aigneHubMountPoint; + const hubBase = env.AIGNE_HUB_URL || 'https://hub.aigne.io'; + const res = await fetch(`${hubBase}/__blocklet__.js?type=json`); + if (!res.ok) throw new Error(`AIGNE Hub fetch failed: ${res.status}`); + const blocklet: any = await res.json(); + const comp = (blocklet?.componentMountPoints || []).find((m: any) => m.did === AIGNE_HUB_DID); + if (!comp) throw new Error('AIGNE Hub component not found'); + aigneHubMountPoint = `${hubBase}${comp.mountPoint}`; + return aigneHubMountPoint; +} + +app.get('/api/image/models', async (c) => { + const hubUrl = await getAigneHubMountPoint(c.env); + const apiKey = c.env.AIGNE_HUB_API_KEY || ''; + const res = await fetch(`${hubUrl}/api/ai/models?type=image`, { + headers: { Authorization: `Bearer ${apiKey}` }, + }); + const data = await res.json(); + return c.json(data, res.status as any); +}); + +app.post('/api/image/generations', async (c) => { + const { prompt, number = 1, model = 'dall-e-2', ...rest } = await c.req.json(); + const hubUrl = await getAigneHubMountPoint(c.env); + const res = await fetch(`${hubUrl}/api/v2/image`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'x-user-did': c.get('user')?.id || '', + Authorization: `Bearer ${c.env.AIGNE_HUB_API_KEY || ''}`, + }, + body: JSON.stringify({ + input: { + ...rest, + prompt, + n: parseInt(String(number), 10), + modelOptions: { model }, + outputFileType: 'url', + }, + 
}), + }); + const data: any = await res.json(); + return c.json(data, res.status as any); +}); + +// Health check +app.get('/health', (c) => c.json({ status: 'ok', version: '1.0.0' })); + +// SPA fallback — non-API routes return index.html for client-side routing +app.notFound(async (c) => { + const path = new URL(c.req.url).pathname; + if (path.startsWith('/api/') || path.startsWith('/health')) { + return c.json({ error: 'Not Found' }, 404); + } + try { + const assets = (c.env as any).ASSETS; + if (assets) { + return assets.fetch(new Request(new URL('/', c.req.url))); + } + } catch {} + return c.json({ error: 'Not Found' }, 404); +}); + +// Error handler +app.onError((err, c) => { + console.error('Unhandled error:', err); + return c.json({ error: err.message || 'Internal Server Error' }, 500); +}); + +export default { + fetch: app.fetch, + + async scheduled(_event: ScheduledEvent, env: Env, ctx: ExecutionContext) { + ctx.waitUntil(cleanupExpiredSessions(env)); + }, +}; diff --git a/cloudflare/tsconfig.json b/cloudflare/tsconfig.json new file mode 100644 index 0000000..e46355c --- /dev/null +++ b/cloudflare/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "ESNext", + "module": "ESNext", + "moduleResolution": "bundler", + "lib": ["ESNext"], + "types": ["@cloudflare/workers-types"], + "strict": false, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "isolatedModules": true, + "noEmit": true, + "jsx": "react-jsx", + "jsxImportSource": "hono/jsx", + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/cloudflare/vitest.config.ts b/cloudflare/vitest.config.ts new file mode 100644 index 0000000..eae12be --- /dev/null +++ b/cloudflare/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + include: ['src/__tests__/utils.test.ts'], + }, +}); diff 
--git a/cloudflare/wrangler.toml b/cloudflare/wrangler.toml new file mode 100644 index 0000000..13bbf28 --- /dev/null +++ b/cloudflare/wrangler.toml @@ -0,0 +1,45 @@ +name = "media-kit" +main = "src/worker.ts" +compatibility_date = "2024-12-01" +compatibility_flags = ["nodejs_compat"] + +# Serve frontend static assets (vite build output) +[assets] +directory = "./public" +not_found_handling = "single-page-application" +binding = "ASSETS" + +[vars] +ENVIRONMENT = "production" +MAX_UPLOAD_SIZE = "500MB" +ALLOWED_FILE_TYPES = ".jpeg,.png,.gif,.svg,.webp,.bmp,.ico" +USE_AI_IMAGE = "true" +ADMIN_DIDS = "did:abt:default-uploader" +AIGNE_HUB_URL = "https://hub.aigne.io" + +[[r2_buckets]] +binding = "R2_UPLOADS" +bucket_name = "media-kit-uploads" + +[[d1_databases]] +binding = "DB" +database_name = "media-kit-db" +database_id = "d0ab97f4-ddf7-4388-b91f-be8427cd0c10" +migrations_dir = "migrations" + +[triggers] +crons = ["0 * * * *"] + +# Secrets (wrangler secret put): +# R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY +# R2_ORIGIN_DOMAIN (R2 public domain, restricted by IP Access Rule) +# UNSPLASH_KEY, UNSPLASH_SECRET +# CF_ACCOUNT_ID +# AIGNE_HUB_API_KEY +# AIGNE_HUB_URL can also be set as secret to override the default + +[env.staging] +name = "media-kit-staging" +[env.staging.vars] +ENVIRONMENT = "staging" +ADMIN_DIDS = "did:abt:default-uploader" diff --git a/packages/uploader/src/react/plugins/ai-image/show-panel/output/index.tsx b/packages/uploader/src/react/plugins/ai-image/show-panel/output/index.tsx index 8e67456..a3c40bb 100644 --- a/packages/uploader/src/react/plugins/ai-image/show-panel/output/index.tsx +++ b/packages/uploader/src/react/plugins/ai-image/show-panel/output/index.tsx @@ -76,8 +76,10 @@ export default function Output({ const res = await handleApi({ ...options, outputFileType: 'file' }); if (res.images) { const list: FileContent[] = res.images || []; - const arr = list.map((item) => ({ - src: `data:image/png;base64,${item.data}`, + const arr = list.map((item: 
FileContent & { + url?: string; + }) => ({ + src: item?.url || `data:image/png;base64,${item.data}`, alt: options.prompt, })); diff --git a/packages/uploader/src/react/plugins/presigned-upload.ts b/packages/uploader/src/react/plugins/presigned-upload.ts new file mode 100644 index 0000000..9a32980 --- /dev/null +++ b/packages/uploader/src/react/plugins/presigned-upload.ts @@ -0,0 +1,291 @@ +/** + * Presigned Upload Plugin for Uppy + * + * Implements presigned URL upload flow as an alternative to TUS protocol. + * Used when the backend returns uploadMode: 'presigned' from /api/uploader/status. + * + * Flow: + * 1. POST /uploads/check → dedup check (size + ext) + * 2. POST /uploads/presign → get presigned URL or multipart session + * 3. PUT presignedUrl → direct upload to R2/S3 + * 4. POST /uploads/confirm → confirm and get upload record + * + * For large files (>= multipart threshold): + * 2b. POST /uploads/presign → get multipart session + * 3b. For each part: + * POST /uploads/multipart/part-url → get part presigned URL + * PUT partUrl → upload part + * 3c. POST /uploads/multipart/complete → assemble parts + * 4. POST /uploads/confirm → confirm + */ + +import { BasePlugin } from '@uppy/core'; +import Cookie from 'js-cookie'; + +// @ts-ignore - getExt is exported from utils +import { getExt } from '../../utils'; + +interface PresignedUploadOptions { + id?: string; + apiBase: string; // e.g. 
'/api' or full URL + headers?: Record; +} + +export default class PresignedUploadPlugin extends BasePlugin { + declare opts: PresignedUploadOptions; + + private boundHandleUpload: (fileIDs: string[]) => Promise; + + constructor(uppy: any, opts: PresignedUploadOptions) { + super(uppy, opts); + this.id = opts.id || 'PresignedUpload'; + this.type = 'uploader'; + this.opts = opts; + this.boundHandleUpload = this.handleUpload.bind(this); + } + + install() { + this.uppy.addUploader(this.boundHandleUpload); + } + + uninstall() { + this.uppy.removeUploader(this.boundHandleUpload); + } + + private getHeaders(): Record { + const headers: Record = { + 'Content-Type': 'application/json', + ...(this.opts.headers || {}), + }; + const csrfToken = Cookie.get('x-csrf-token'); + if (csrfToken) { + headers['x-csrf-token'] = csrfToken; + } + // @ts-ignore + const componentDid = window?.uploaderComponentId || window?.blocklet?.componentId; + if (componentDid) { + headers['x-component-did'] = (componentDid || '').split('/').pop() || ''; + } + return headers; + } + + private async apiCall(path: string, body: any): Promise { + const url = `${this.opts.apiBase}${path}`; + const res = await fetch(url, { + method: 'POST', + headers: this.getHeaders(), + body: JSON.stringify(body), + credentials: 'include', + }); + if (!res.ok) { + const error = await res.json().catch(() => ({ error: res.statusText })); + throw new Error(error.error || `Upload failed: ${res.status}`); + } + return res.json(); + } + + private async handleUpload(fileIDs: string[]): Promise { + const files = fileIDs.map((id) => this.uppy.getFile(id)); + + for (const file of files) { + try { + await this.uploadFile(file); + } catch (err: any) { + this.uppy.log(`[PresignedUpload] Error uploading ${file.name}: ${err.message}`, 'error'); + this.uppy.emit('upload-error', file, err); + } + } + } + + private async uploadFile(file: any): Promise { + const ext = getExt(file) || file.name.split('.').pop() || ''; + const size = file.size 
|| file.data?.size || 0; + const folderId = file.meta?.folderId || ''; + const tags = file.meta?.tags || ''; + + // Step 1: Dedup check + const checkResult = await this.apiCall('/uploads/check', { size, ext: `.${ext}` }); + + let confirmData: any; + + if (checkResult.exists) { + // File already exists — clone it via confirm + confirmData = await this.apiCall('/uploads/confirm', { + existingUploadId: checkResult.uploadId, + originalname: file.name, + mimetype: file.type, + folderId, + tags, + }); + } else { + // Step 2: Get presigned URL + const presignResult = await this.apiCall('/uploads/presign', { + originalname: file.name, + mimetype: file.type, + size, + ext: `.${ext}`, + folderId, + }); + + // Step 3: Upload file + try { + if (presignResult.multipart) { + await this.uploadMultipart(file, presignResult); + } else { + await this.uploadDirect(file, presignResult.presignedUrl); + } + } catch (uploadErr) { + // Abort multipart session on failure to avoid orphaned R2 parts + if (presignResult.multipart) { + await this.apiCall('/uploads/multipart/abort', { sessionId: presignResult.sessionId }).catch(() => {}); + } + throw uploadErr; + } + + // Step 4: Confirm + confirmData = await this.apiCall('/uploads/confirm', { + sessionId: presignResult.sessionId, + originalname: file.name, + mimetype: file.type, + folderId, + tags, + }); + } + + // Build result compatible with TUS flow — use the url from server response directly + const uploadURL = confirmData.url || `/uploads/${confirmData.filename}`; + + const result = { + data: confirmData, + method: 'POST', + url: uploadURL, + status: 200, + headers: {} as Record, + file, + uploadURL, + }; + + // Set file state + this.uppy.setFileState(file.id, { + progress: { + uploadStarted: Date.now(), + uploadComplete: true, + percentage: 100, + bytesUploaded: file.size || file.data?.size || 0, + bytesTotal: file.size || file.data?.size || 0, + }, + responseResult: result, + }); + + // Emit upload-success — the listener in uploader.tsx 
handles _onUploadFinish and emitUploadSuccess + this.uppy.emit('upload-success', this.uppy.getFile(file.id), { + uploadURL, + status: 200, + body: confirmData, + }); + } + + private async uploadDirect(file: any, presignedUrl: string): Promise { + const xhr = new XMLHttpRequest(); + await new Promise((resolve, reject) => { + xhr.open('PUT', presignedUrl, true); + if (file.type) { + xhr.setRequestHeader('Content-Type', file.type); + } + + xhr.upload.onprogress = (e) => { + if (e.lengthComputable) { + const percentage = Math.round((e.loaded / e.total) * 100); + this.uppy.setFileState(file.id, { + progress: { + uploadStarted: Date.now(), + uploadComplete: false, + percentage, + bytesUploaded: e.loaded, + bytesTotal: e.total, + }, + }); + // @ts-ignore + this.uppy.calculateTotalProgress?.(); + this.uppy.emit('upload-progress', this.uppy.getFile(file.id), { + uploader: this, + bytesUploaded: e.loaded, + bytesTotal: e.total, + }); + } + }; + + xhr.onload = () => { + if (xhr.status >= 200 && xhr.status < 300) { + resolve(); + } else { + reject(new Error(`Direct upload failed: ${xhr.status} ${xhr.statusText}`)); + } + }; + + xhr.onerror = () => reject(new Error('Network error during upload')); + xhr.onabort = () => reject(new Error('Upload aborted')); + + xhr.send(file.data); + }); + } + + private async uploadMultipart(file: any, presignResult: any): Promise { + const { sessionId, partSize, partCount } = presignResult; + const size = file.size || file.data?.size || 0; + const parts: Array<{ partNumber: number; etag: string }> = []; + + let totalUploaded = 0; + + for (let i = 0; i < partCount; i++) { + const partNumber = i + 1; + const start = i * partSize; + const end = Math.min(start + partSize, size); + const partBlob = file.data.slice(start, end); + + // Get presigned URL for this part + const { presignedUrl } = await this.apiCall('/uploads/multipart/part-url', { + sessionId, + partNumber, + }); + + // Upload part + const res = await fetch(presignedUrl, { + method: 
'PUT', + body: partBlob, + }); + + if (!res.ok) { + throw new Error(`Part ${partNumber} upload failed: ${res.status}`); + } + + const etag = res.headers.get('ETag') || ''; + parts.push({ partNumber, etag: etag.replace(/"/g, '') }); + + totalUploaded += end - start; + const percentage = Math.round((totalUploaded / size) * 100); + this.uppy.setFileState(file.id, { + progress: { + uploadStarted: Date.now(), + uploadComplete: false, + percentage, + bytesUploaded: totalUploaded, + bytesTotal: size, + }, + }); + // @ts-ignore + this.uppy.calculateTotalProgress?.(); + this.uppy.emit('upload-progress', this.uppy.getFile(file.id), { + uploader: this, + bytesUploaded: totalUploaded, + bytesTotal: size, + }); + } + + // Complete multipart upload + await this.apiCall('/uploads/multipart/complete', { + sessionId, + parts, + }); + } +} diff --git a/packages/uploader/src/react/uploader.tsx b/packages/uploader/src/react/uploader.tsx index b6be5e1..6f8399f 100644 --- a/packages/uploader/src/react/uploader.tsx +++ b/packages/uploader/src/react/uploader.tsx @@ -22,6 +22,8 @@ import DropTarget from '@uppy/drop-target'; import ImageEditor from '@uppy/image-editor'; import ThumbnailGenerator from '@uppy/thumbnail-generator'; import Tus from '@uppy/tus'; +// @ts-ignore +import PresignedUploadPlugin from './plugins/presigned-upload'; import ComponentInstaller from '@blocklet/ui-react/lib/ComponentInstaller'; import mime from 'mime-types'; import xbytes from 'xbytes'; @@ -162,22 +164,42 @@ const getPluginList = (props: any) => { const uploader = ref.current.getUploader(); uploader?.emit('ai-image:selected', data); - data.forEach(({ src: base64, alt }: any, index: number) => { + data.forEach(({ src, alt }: any, index: number) => { const getSliceText = (str: string) => { return str?.length > 16 ? 
`${str?.slice(0, 8)}...${str?.slice(-4)}` : str; }; - const fileName = `${getSliceText(alt) || getSliceText(base64)}.png`; // must be png - - const formatFile = { - name: fileName, - type: 'image/png', // must be png - data: base64ToFile(base64, fileName), - source: 'AIImage', - isRemote: false, - }; - - uploader?.addFile(formatFile); + const fileName = `${getSliceText(alt) || 'ai-image'}.png`; // must be png + + // Support both URL and base64 data URI + if (src && !src.startsWith('data:')) { + // URL — fetch and convert to File + fetch(src) + .then((res) => res.blob()) + .then((blob) => { + const file = new File([blob], fileName, { type: 'image/png' }); + uploader?.addFile({ + name: fileName, + type: 'image/png', + data: file, + source: 'AIImage', + isRemote: false, + }); + }) + .catch((err) => { + uploader?.log(`[AIImage] Failed to fetch image: ${err.message}`, 'error'); + }); + } else { + // base64 data URI + const formatFile = { + name: fileName, + type: 'image/png', + data: base64ToFile(src, fileName), + source: 'AIImage', + isRemote: false, + }; + uploader?.addFile(formatFile); + } }); }} /> @@ -276,6 +298,7 @@ export function initUploader(props: any) { restrictions, onChange, initialFiles, + uploadMode = 'tus', } = props; const pluginMap = keyBy(pluginList, 'id'); @@ -314,159 +337,191 @@ export function initUploader(props: any) { }, ...coreProps, restrictions, - }).use(Tus, { - chunkSize: 1024 * 1024 * 10, // default chunk size 10MB - removeFingerprintOnSuccess: true, - // docs: https://github.com/tus/tus-js-client/blob/main/docs/api.md - withCredentials: true, - endpoint: uploaderUrl, - - async onBeforeRequest(req, file) { - // @ts-ignore - const { hashFileName, id, meta } = file; + }); - // @ts-ignore - const mockResponse = currentUppy.getFile(id)?.mockResponse || null; - if (req.getMethod() === 'PATCH' && mockResponse) { - // mock response to avoid next step upload - req.send = () => mockResponse; - } + if (uploadMode === 'presigned') { + // Presigned URL 
upload mode (for CF Workers / R2) + const apiBase = uploaderUrl.replace(/\/uploads\/?$/, ''); + // @ts-ignore + currentUppy.use(PresignedUploadPlugin, { + apiBase, + }); - const ext = getExt(file); - - // put the hash in the header - req.setHeader('x-uploader-file-name', `${hashFileName}`); - req.setHeader('x-uploader-file-id', `${id}`); - req.setHeader('x-uploader-file-ext', `${ext}`); - req.setHeader('x-uploader-base-url', new URL(req.getURL()).pathname); - req.setHeader('x-uploader-endpoint-url', uploaderUrl); - req.setHeader( - 'x-uploader-metadata', - JSON.stringify(meta, (key, value) => { - if (typeof value === 'string') { - return encodeURIComponent(value); - } - return value; - }) - ); - // add csrf token if exist - const csrfToken = Cookie.get('x-csrf-token'); - if (csrfToken) { - req.setHeader('x-csrf-token', csrfToken); - } + // Wire up _onUploadFinish for presigned mode + currentUppy.on('upload-success', async (file: any, response: any) => { + // Skip if this event was already handled (e.g. 
from TUS or mock) + if (!response.body) return; + + const result = { + data: response.body, + method: 'POST', + url: response.uploadURL || '', + status: 200, + headers: {} as Record, + file, + uploadURL: response.uploadURL || '', + }; + + await _onUploadFinish?.(result); - // @ts-ignore get folderId when upload using - const componentDid = window?.uploaderComponentId || window?.blocklet?.componentId; - if (componentDid) { + // @ts-ignore custom event — mirrors TUS flow's emitUploadSuccess + currentUppy.emitUploadSuccess(file, result); + }); + } else { + // TUS upload mode (default, for Blocklet) + currentUppy.use(Tus, { + chunkSize: 1024 * 1024 * 10, // default chunk size 10MB + removeFingerprintOnSuccess: true, + // docs: https://github.com/tus/tus-js-client/blob/main/docs/api.md + withCredentials: true, + endpoint: uploaderUrl, + + async onBeforeRequest(req, file) { // @ts-ignore - req.setHeader('x-component-did', (componentDid || '').split('/').pop()); - } - }, - onAfterResponse: async (req, res) => { - const result = {} as any; - const xhr = req.getUnderlyingObject(); + const { hashFileName, id, meta } = file; - try { - if (xhr.response) { - result.data = JSON.parse(xhr.response); + // @ts-ignore + const mockResponse = currentUppy.getFile(id)?.mockResponse || null; + if (req.getMethod() === 'PATCH' && mockResponse) { + // mock response to avoid next step upload + req.send = () => mockResponse; } - } catch (error) { - result.data = {}; - } - result.method = req.getMethod().toUpperCase(); - result.url = req.getURL(); - result.status = xhr.status; - // @ts-ignore - result.headers = { - // @ts-ignore - ...req._headers, - }; + const ext = getExt(file); + + // put the hash in the header + req.setHeader('x-uploader-file-name', `${hashFileName}`); + req.setHeader('x-uploader-file-id', `${id}`); + req.setHeader('x-uploader-file-ext', `${ext}`); + req.setHeader('x-uploader-base-url', new URL(req.getURL()).pathname); + req.setHeader('x-uploader-endpoint-url', 
uploaderUrl); + req.setHeader( + 'x-uploader-metadata', + JSON.stringify(meta, (key, value) => { + if (typeof value === 'string') { + return encodeURIComponent(value); + } + return value; + }) + ); + // add csrf token if exist + const csrfToken = Cookie.get('x-csrf-token'); + if (csrfToken) { + req.setHeader('x-csrf-token', csrfToken); + } - const allResponseHeaders = xhr.getAllResponseHeaders(); + // @ts-ignore get folderId when upload using + const componentDid = window?.uploaderComponentId || window?.blocklet?.componentId; + if (componentDid) { + // @ts-ignore + req.setHeader('x-component-did', (componentDid || '').split('/').pop()); + } + }, + onAfterResponse: async (req, res) => { + const result = {} as any; + const xhr = req.getUnderlyingObject(); - // format headers - if (allResponseHeaders) { - const headers = allResponseHeaders.split('\r\n'); - headers.forEach((item: string) => { - const [key, value] = item.split(': '); - if (key && value) { - result.headers[key] = value; + try { + if (xhr.response) { + result.data = JSON.parse(xhr.response); } - }); - } + } catch (error) { + result.data = {}; + } - const file = currentUppy.getFile(result.headers['x-uploader-file-id']); + result.method = req.getMethod().toUpperCase(); + result.url = req.getURL(); + result.status = xhr.status; + // @ts-ignore + result.headers = { + // @ts-ignore + ...req._headers, + }; - // @ts-ignore - if (req.getMethod() === 'PATCH' && file.mockResponse) { - // mock response do nothing - return; - } + const allResponseHeaders = xhr.getAllResponseHeaders(); - // only call onUploadFinish if it's a PATCH / POST request - if (['PATCH', 'POST'].includes(result.method) && [200, 500].includes(result.status)) { - const isExist = [true, 'true'].includes(result.headers['x-uploader-file-exist']); - const uploadURL = getUrl(result.url, result.headers['x-uploader-file-name']); // upload URL with file name + // format headers + if (allResponseHeaders) { + const headers = 
allResponseHeaders.split('\r\n'); + headers.forEach((item: string) => { + const [key, value] = item.split(': '); + if (key && value) { + result.headers[key] = value; + } + }); + } - result.file = file; - result.uploadURL = uploadURL; + const file = currentUppy.getFile(result.headers['x-uploader-file-id']); - const responseResult = { - uploadURL, - ...result, - }; + // @ts-ignore + if (req.getMethod() === 'PATCH' && file.mockResponse) { + // mock response do nothing + return; + } - currentUppy.setFileState(file.id, { - responseResult, - }); + // only call onUploadFinish if it's a PATCH / POST request + if (['PATCH', 'POST'].includes(result.method) && [200, 500].includes(result.status)) { + const isExist = [true, 'true'].includes(result.headers['x-uploader-file-exist']); + const uploadURL = getUrl(result.url, result.headers['x-uploader-file-name']); // upload URL with file name - // exist but not upload - if (isExist && file) { - // if POST method check exist - if (result.method === 'POST') { - // set mock response to avoid next step upload - currentUppy.setFileState(file.id, { - mockResponse: res, - }); - } + result.file = file; + result.uploadURL = uploadURL; - // only trigger uppy event when exist - currentUppy.emit('upload-success', currentUppy.getFile(file.id), { - ...responseResult, - body: result.data, + const responseResult = { + uploadURL, + ...result, + }; + + currentUppy.setFileState(file.id, { + responseResult, }); - currentUppy.emit('postprocess-complete', currentUppy.getFile(file.id)); - // @ts-ignore - currentUppy.calculateTotalProgress(); - } + // exist but not upload + if (isExist && file) { + // if POST method check exist + if (result.method === 'POST') { + // set mock response to avoid next step upload + currentUppy.setFileState(file.id, { + mockResponse: res, + }); + } - if (result.status === 200) { - await _onUploadFinish?.(result); + // only trigger uppy event when exist + currentUppy.emit('upload-success', currentUppy.getFile(file.id), { + 
...responseResult, + body: result.data, + }); + currentUppy.emit('postprocess-complete', currentUppy.getFile(file.id)); - // @ts-ignore custom event - currentUppy.emitUploadSuccess(file, result); + // @ts-ignore + currentUppy.calculateTotalProgress(); + } + + if (result.status === 200) { + await _onUploadFinish?.(result); + + // @ts-ignore custom event + currentUppy.emitUploadSuccess(file, result); + } } - } - // each time a response is received - if (_onAfterResponse) { - await _onAfterResponse?.(xhr); - } + // each time a response is received + if (_onAfterResponse) { + await _onAfterResponse?.(xhr); + } - const uploadProgressDone = currentUppy.getState().totalProgress === 100; + const uploadProgressDone = currentUppy.getState().totalProgress === 100; - const shouldAutoCloseAfterDropUpload = currentUppy.getFiles().every((item: any) => item.source === 'DropTarget'); + const shouldAutoCloseAfterDropUpload = currentUppy.getFiles().every((item: any) => item.source === 'DropTarget'); - // close uploader when upload progress done and all files are from DropTarget - if (uploadProgressDone && shouldAutoCloseAfterDropUpload) { - currentUppy.close(); - } - }, - ...tusProps, - }); - // .use(GoldenRetriever); + // close uploader when upload progress done and all files are from DropTarget + if (uploadProgressDone && shouldAutoCloseAfterDropUpload) { + currentUppy.close(); + } + }, + ...tusProps, + }); + } const appendUploadIdEvent = ({ fileIDs, id }: { fileIDs: string[]; id: string }) => { fileIDs.forEach((fileId: any) => { @@ -551,6 +606,7 @@ export function Uploader({ availablePluginMap: {} as any, restrictions: cloneDeep(props?.coreProps?.restrictions) || ({} as any), isError: false, + uploadMode: 'tus' as 'tus' | 'presigned', }); const theme = useTheme(); @@ -595,6 +651,7 @@ export function Uploader({ try { await mediaKitApi.get('/api/uploader/status').then(({ data }: any) => { state.availablePluginMap = data.availablePluginMap; + state.uploadMode = data.uploadMode || 
'tus'; if (!apiPathProps.disableMediaKitPrefix && isNil(props?.coreProps?.restrictions)) { restrictions = data.restrictions || {}; @@ -733,6 +790,7 @@ export function Uploader({ apiPathProps, pluginList, restrictions: state.restrictions, + uploadMode: state.uploadMode, }); state.uppy.open = open; @@ -832,6 +890,7 @@ export function Uploader({ locale, restrictions: state.restrictions, theme, + uploadMode: state.uploadMode, }), ]); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3ae1ba0..0cc8942 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -274,6 +274,166 @@ importers: specifier: ^5.2.9 version: 5.2.9 + cloudflare/frontend: + dependencies: + '@emotion/react': + specifier: ^11.14.0 + version: 11.14.0(@types/react@19.2.14)(react@19.1.0) + '@emotion/styled': + specifier: ^11.14.0 + version: 11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0) + '@lottiefiles/react-lottie-player': + specifier: ^3.5.4 + version: 3.5.4(react@19.1.0) + '@mui/icons-material': + specifier: ^7.1.2 + version: 7.1.2(@mui/material@7.1.2(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(@types/react@19.2.14)(react@19.1.0) + '@mui/material': + specifier: ^7.1.2 + version: 7.1.2(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + '@uppy/core': + specifier: 3.13.1 + version: 3.13.1 + '@uppy/dashboard': + specifier: 3.9.1 + version: 3.9.1(@uppy/core@3.13.1) + '@uppy/drag-drop': + specifier: 3.1.1 + version: 3.1.1(@uppy/core@3.13.1) + '@uppy/drop-target': + specifier: 2.1.0 + version: 2.1.0(@uppy/core@3.13.1) + '@uppy/file-input': + specifier: 3.1.2 + 
version: 3.1.2(@uppy/core@3.13.1) + '@uppy/golden-retriever': + specifier: 3.2.0 + version: 3.2.0(@uppy/core@3.13.1) + '@uppy/image-editor': + specifier: 2.4.6 + version: 2.4.6(@uppy/core@3.13.1) + '@uppy/locales': + specifier: 3.5.4 + version: 3.5.4 + '@uppy/progress-bar': + specifier: 3.1.1 + version: 3.1.1(@uppy/core@3.13.1) + '@uppy/provider-views': + specifier: 3.13.0 + version: 3.13.0(@uppy/core@3.13.1) + '@uppy/react': + specifier: 3.4.0 + version: 3.4.0(@uppy/core@3.13.1)(@uppy/dashboard@3.9.1(@uppy/core@3.13.1))(@uppy/drag-drop@3.1.1(@uppy/core@3.13.1))(@uppy/file-input@3.1.2(@uppy/core@3.13.1))(@uppy/progress-bar@3.1.1(@uppy/core@3.13.1))(@uppy/status-bar@3.3.3(@uppy/core@3.13.1))(react@19.1.0) + '@uppy/status-bar': + specifier: 3.3.3 + version: 3.3.3(@uppy/core@3.13.1) + '@uppy/tus': + specifier: 3.5.5 + version: 3.5.5(@uppy/core@3.13.1) + '@uppy/unsplash': + specifier: 3.3.1 + version: 3.3.1(@uppy/core@3.13.1) + '@uppy/url': + specifier: 3.6.1 + version: 3.6.1(@uppy/core@3.13.1) + '@uppy/webcam': + specifier: 3.4.2 + version: 3.4.2(@uppy/core@3.13.1) + ahooks: + specifier: ^3.8.1 + version: 3.8.5(react@19.1.0) + axios: + specifier: ^1.7.0 + version: 1.10.0 + copy-to-clipboard: + specifier: ^3.3.3 + version: 3.3.3 + dompurify: + specifier: ^3.2.2 + version: 3.2.6 + exifr: + specifier: ^7.1.3 + version: 7.1.3 + fflate: + specifier: ^0.8.2 + version: 0.8.2 + js-cookie: + specifier: ^3.0.5 + version: 3.0.5 + lodash: + specifier: ^4.17.21 + version: 4.17.21 + micromatch: + specifier: ^4.0.8 + version: 4.0.8 + mime-types: + specifier: ^2.1.35 + version: 2.1.35 + path-browserify: + specifier: ^1.0.1 + version: 1.0.1 + preact: + specifier: 10.20.1 + version: 10.20.1 + pretty-bytes: + specifier: ^6.1.0 + version: 6.1.1 + prop-types: + specifier: ^15.8.1 + version: 15.8.1 + react: + specifier: ^19.0.0 + version: 19.1.0 + react-dom: + specifier: ^19.0.0 + version: 19.1.0(react@19.1.0) + react-player: + specifier: ^2.16.0 + version: 2.16.0(react@19.1.0) + 
react-router-dom: + specifier: ^6.28.0 + version: 6.28.0(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + spark-md5: + specifier: ^3.0.2 + version: 3.0.2 + timeago.js: + specifier: ^4.0.2 + version: 4.0.2 + ufo: + specifier: ^1.6.1 + version: 1.6.1 + url-join: + specifier: ^4.0.1 + version: 4.0.1 + wolfy87-eventemitter: + specifier: ^5.2.9 + version: 5.2.9 + xbytes: + specifier: ^1.9.1 + version: 1.9.1 + devDependencies: + '@types/js-cookie': + specifier: ^3.0.6 + version: 3.0.6 + '@types/lodash': + specifier: ^4.17.0 + version: 4.17.13 + '@types/react': + specifier: ^19.0.0 + version: 19.2.14 + '@types/react-dom': + specifier: ^19.0.0 + version: 19.2.3(@types/react@19.2.14) + '@vitejs/plugin-react': + specifier: ^4.3.0 + version: 4.6.0(vite@6.4.1(@types/node@22.10.1)(jiti@2.6.1)(tsx@4.19.2)(yaml@2.8.1)) + typescript: + specifier: ^5.5.0 + version: 5.7.2 + vite: + specifier: ^6.0.0 + version: 6.4.1(@types/node@22.10.1)(jiti@2.6.1)(tsx@4.19.2)(yaml@2.8.1) + packages/uploader: dependencies: '@blocklet/ui-react': @@ -3878,6 +4038,9 @@ packages: '@types/js-cookie@2.2.7': resolution: {integrity: sha512-aLkWa0C0vO5b4Sr798E26QgOkss68Un0bLjs7u9qxzPT5CG+8DuNTffWES58YzJs3hrVAOs1wonycqEBqNJubA==} + '@types/js-cookie@3.0.6': + resolution: {integrity: sha512-wkw9yd1kEXOPnvEeEV1Go1MmxtBJL0RR79aOTAApecWFVu7w0NNXNqhcWgvw2YgZDYadliXkl14pa3WXw5jlCQ==} + '@types/json5@0.0.29': resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} @@ -3950,6 +4113,11 @@ packages: '@types/react-dom@18.3.1': resolution: {integrity: sha512-qW1Mfv8taImTthu4KoXgDfLuk4bydU6Q/TkADnDWWHwi4NX4BR+LWfTp2sVmTqRrsHvyDDTelgelxJ+SsejKKQ==} + '@types/react-dom@19.2.3': + resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} + peerDependencies: + '@types/react': ^19.2.0 + '@types/react-transition-group@4.4.12': resolution: {integrity: 
sha512-8TV6R3h2j7a91c+1DXdJi3Syo69zzIZbz7Lg5tORM5LEJG7X/E6a1V3drRyBRZq7/utz7A+c4OgYLiLcYGHG6w==} peerDependencies: @@ -3958,6 +4126,9 @@ packages: '@types/react@18.3.12': resolution: {integrity: sha512-D2wOSq/d6Agt28q7rSI3jhU7G6aiuzljDGZ2hTZHIkrTLUI+AF3WMeKkEZ9nN2fkBAlcktT6vcZjDFiIhMYEQw==} + '@types/react@19.2.14': + resolution: {integrity: sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==} + '@types/resolve@1.20.2': resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==} @@ -5427,6 +5598,9 @@ packages: csstype@3.1.3: resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} + csstype@3.2.3: + resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + cuint@0.2.2: resolution: {integrity: sha512-d4ZVpCW31eWwCMe1YT3ur7mUDnTXbgwyzaL320DrcRT45rfjYxkt5QWLrmOJ+/UEAI2+fQgKe/fCjR8l4TpRgw==} @@ -10739,6 +10913,46 @@ packages: peerDependencies: vite: '>=2.6.0' + vite@6.4.1: + resolution: {integrity: sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + jiti: '>=1.21.0' + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + vite@7.0.0: resolution: {integrity: 
sha512-ixXJB1YRgDIw2OszKQS9WxGHKwLdCsbQNkpJN171udl6szi/rIySHL6/Os3s2+oE4P/FLD4dxg4mD7Wust+u5g==} engines: {node: ^20.19.0 || >=22.12.0} @@ -14083,6 +14297,22 @@ snapshots: transitivePeerDependencies: - supports-color + '@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + '@emotion/babel-plugin': 11.13.5 + '@emotion/cache': 11.14.0 + '@emotion/serialize': 1.3.3 + '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.1.0) + '@emotion/utils': 1.4.2 + '@emotion/weak-memoize': 0.4.0 + hoist-non-react-statics: 3.3.2 + react: 19.1.0 + optionalDependencies: + '@types/react': 19.2.14 + transitivePeerDependencies: + - supports-color + '@emotion/serialize@1.3.3': dependencies: '@emotion/hash': 0.9.2 @@ -14108,6 +14338,21 @@ snapshots: transitivePeerDependencies: - supports-color + '@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + '@emotion/babel-plugin': 11.13.5 + '@emotion/is-prop-valid': 1.3.1 + '@emotion/react': 11.14.0(@types/react@19.2.14)(react@19.1.0) + '@emotion/serialize': 1.3.3 + '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.1.0) + '@emotion/utils': 1.4.2 + react: 19.1.0 + optionalDependencies: + '@types/react': 19.2.14 + transitivePeerDependencies: + - supports-color + '@emotion/unitless@0.10.0': {} '@emotion/use-insertion-effect-with-fallbacks@1.2.0(react@19.1.0)': @@ -15020,6 +15265,14 @@ snapshots: optionalDependencies: '@types/react': 18.3.12 + '@mui/icons-material@7.1.2(@mui/material@7.1.2(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(@types/react@19.2.14)(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + '@mui/material': 
7.1.2(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + react: 19.1.0 + optionalDependencies: + '@types/react': 19.2.14 + '@mui/lab@7.0.0-beta.14(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0))(@mui/material@7.1.2(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(@types/react@18.3.12)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)': dependencies: '@babel/runtime': 7.28.4 @@ -15073,6 +15326,27 @@ snapshots: '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0) '@types/react': 18.3.12 + '@mui/material@7.1.2(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react-dom@19.1.0(react@19.1.0))(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + '@mui/core-downloads-tracker': 7.1.2 + '@mui/system': 7.1.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0) + '@mui/types': 7.4.8(@types/react@19.2.14) + '@mui/utils': 7.3.5(@types/react@19.2.14)(react@19.1.0) + '@popperjs/core': 2.11.8 + '@types/react-transition-group': 4.4.12(@types/react@19.2.14) + clsx: 2.1.1 + csstype: 3.1.3 + prop-types: 15.8.1 + react: 19.1.0 + react-dom: 19.1.0(react@19.1.0) + react-is: 19.2.0 
+ react-transition-group: 4.4.5(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + optionalDependencies: + '@emotion/react': 11.14.0(@types/react@19.2.14)(react@19.1.0) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0) + '@types/react': 19.2.14 + '@mui/private-theming@7.1.1(@types/react@18.3.12)(react@19.1.0)': dependencies: '@babel/runtime': 7.28.4 @@ -15082,6 +15356,15 @@ snapshots: optionalDependencies: '@types/react': 18.3.12 + '@mui/private-theming@7.1.1(@types/react@19.2.14)(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + '@mui/utils': 7.3.5(@types/react@19.2.14)(react@19.1.0) + prop-types: 15.8.1 + react: 19.1.0 + optionalDependencies: + '@types/react': 19.2.14 + '@mui/styled-engine@7.1.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0))(react@19.1.0)': dependencies: '@babel/runtime': 7.28.4 @@ -15095,6 +15378,19 @@ snapshots: '@emotion/react': 11.14.0(@types/react@18.3.12)(react@19.1.0) '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0) + '@mui/styled-engine@7.1.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + '@emotion/cache': 11.14.0 + '@emotion/serialize': 1.3.3 + '@emotion/sheet': 1.4.0 + csstype: 3.1.3 + prop-types: 15.8.1 + react: 19.1.0 + optionalDependencies: + '@emotion/react': 11.14.0(@types/react@19.2.14)(react@19.1.0) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0) + 
'@mui/system@7.1.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0)': dependencies: '@babel/runtime': 7.28.4 @@ -15111,12 +15407,34 @@ snapshots: '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@18.3.12)(react@19.1.0))(@types/react@18.3.12)(react@19.1.0) '@types/react': 18.3.12 + '@mui/system@7.1.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + '@mui/private-theming': 7.1.1(@types/react@19.2.14)(react@19.1.0) + '@mui/styled-engine': 7.1.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0))(react@19.1.0) + '@mui/types': 7.4.8(@types/react@19.2.14) + '@mui/utils': 7.3.5(@types/react@19.2.14)(react@19.1.0) + clsx: 2.1.1 + csstype: 3.1.3 + prop-types: 15.8.1 + react: 19.1.0 + optionalDependencies: + '@emotion/react': 11.14.0(@types/react@19.2.14)(react@19.1.0) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.2.14)(react@19.1.0))(@types/react@19.2.14)(react@19.1.0) + '@types/react': 19.2.14 + '@mui/types@7.4.8(@types/react@18.3.12)': dependencies: '@babel/runtime': 7.28.4 optionalDependencies: '@types/react': 18.3.12 + '@mui/types@7.4.8(@types/react@19.2.14)': + dependencies: + '@babel/runtime': 7.28.4 + optionalDependencies: + '@types/react': 19.2.14 + '@mui/utils@7.3.5(@types/react@18.3.12)(react@19.1.0)': dependencies: '@babel/runtime': 7.28.4 @@ -15129,6 +15447,18 @@ snapshots: optionalDependencies: '@types/react': 18.3.12 + '@mui/utils@7.3.5(@types/react@19.2.14)(react@19.1.0)': + dependencies: + '@babel/runtime': 7.28.4 + 
'@mui/types': 7.4.8(@types/react@19.2.14) + '@types/prop-types': 15.7.15 + clsx: 2.1.1 + prop-types: 15.8.1 + react: 19.1.0 + react-is: 19.2.0 + optionalDependencies: + '@types/react': 19.2.14 + '@nedb/binary-search-tree@2.1.5': {} '@nedb/core@2.1.5': @@ -16812,6 +17142,8 @@ snapshots: '@types/js-cookie@2.2.7': {} + '@types/js-cookie@3.0.6': {} + '@types/json5@0.0.29': {} '@types/jsonfile@6.1.4': @@ -16888,15 +17220,27 @@ snapshots: dependencies: '@types/react': 18.3.12 + '@types/react-dom@19.2.3(@types/react@19.2.14)': + dependencies: + '@types/react': 19.2.14 + '@types/react-transition-group@4.4.12(@types/react@18.3.12)': dependencies: '@types/react': 18.3.12 + '@types/react-transition-group@4.4.12(@types/react@19.2.14)': + dependencies: + '@types/react': 19.2.14 + '@types/react@18.3.12': dependencies: '@types/prop-types': 15.7.15 csstype: 3.1.3 + '@types/react@19.2.14': + dependencies: + csstype: 3.2.3 + '@types/resolve@1.20.2': {} '@types/responselike@1.0.3': @@ -17295,6 +17639,18 @@ snapshots: is-mobile: 3.1.1 preact: 10.20.1 + '@vitejs/plugin-react@4.6.0(vite@6.4.1(@types/node@22.10.1)(jiti@2.6.1)(tsx@4.19.2)(yaml@2.8.1))': + dependencies: + '@babel/core': 7.27.7 + '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.27.7) + '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.27.7) + '@rolldown/pluginutils': 1.0.0-beta.19 + '@types/babel__core': 7.20.5 + react-refresh: 0.17.0 + vite: 6.4.1(@types/node@22.10.1)(jiti@2.6.1)(tsx@4.19.2)(yaml@2.8.1) + transitivePeerDependencies: + - supports-color + '@vitejs/plugin-react@4.6.0(vite@7.0.0(@types/node@22.10.1)(jiti@2.6.1)(tsx@4.19.2)(yaml@2.8.1))': dependencies: '@babel/core': 7.27.7 @@ -17729,7 +18085,7 @@ snapshots: axios@1.10.0: dependencies: - follow-redirects: 1.15.9(debug@4.3.7) + follow-redirects: 1.15.9 form-data: 4.0.2 proxy-from-env: 1.1.0 transitivePeerDependencies: @@ -18803,6 +19159,8 @@ snapshots: csstype@3.1.3: {} + csstype@3.2.3: {} + cuint@0.2.2: {} culvert@0.1.2: {} @@ 
-20148,6 +20506,8 @@ snapshots: dependencies: from2: 2.3.0 + follow-redirects@1.15.9: {} + follow-redirects@1.15.9(debug@4.3.7): optionalDependencies: debug: 4.3.7 @@ -25103,6 +25463,21 @@ snapshots: - supports-color - typescript + vite@6.4.1(@types/node@22.10.1)(jiti@2.6.1)(tsx@4.19.2)(yaml@2.8.1): + dependencies: + esbuild: 0.25.5 + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + postcss: 8.5.6 + rollup: 4.44.1 + tinyglobby: 0.2.15 + optionalDependencies: + '@types/node': 22.10.1 + fsevents: 2.3.3 + jiti: 2.6.1 + tsx: 4.19.2 + yaml: 2.8.1 + vite@7.0.0(@types/node@20.17.9)(jiti@2.6.1)(tsx@4.19.2)(yaml@2.8.1): dependencies: esbuild: 0.25.5 From e94766a8254f9f95dc0a685062089c68bacad794 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Tue, 17 Mar 2026 22:09:27 +0800 Subject: [PATCH 02/21] fix: improve CF Workers implementation based on review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Code improvements: - Multipart: use R2 binding for create/complete/abort instead of S3 API (zero latency, no credential dependency, only listParts still uses S3) - proxy-put: stream request body directly to R2 instead of arrayBuffer() (avoids 128MB memory ceiling for large files in dev mode) - Cleanup: delete temp R2 objects for expired single-file sessions (previously only multipart sessions were cleaned up) - SVG sanitization: add + + + ); + } -function SessionProvider({ children }: { children: ReactNode; serviceHost?: string; protectedRoutes?: string[] }) { - return {children}; + return {children}; } function SessionConsumer({ children }: { children: (session: any) => ReactNode }) { @@ -33,7 +106,11 @@ function SessionConsumer({ children }: { children: (session: any) => ReactNode } function withSession(Component: any) { return function WrappedComponent(props: any) { - return ; + return ( + + {(ctx: any) => } + + ); }; } diff --git a/cloudflare/frontend/vite.config.ts b/cloudflare/frontend/vite.config.ts index 
5452b5c..c312fee 100644 --- a/cloudflare/frontend/vite.config.ts +++ b/cloudflare/frontend/vite.config.ts @@ -20,6 +20,8 @@ export default defineConfig({ '/api': 'http://localhost:8787', '/uploads': 'http://localhost:8787', '/health': 'http://localhost:8787', + '/__blocklet__.js': 'http://localhost:8787', + '/.well-known/service': 'http://localhost:8787', }, fs: { strict: false, diff --git a/cloudflare/src/__tests__/worker.integration.ts b/cloudflare/src/__tests__/worker.integration.ts index 96cd2bf..f7da270 100644 --- a/cloudflare/src/__tests__/worker.integration.ts +++ b/cloudflare/src/__tests__/worker.integration.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from 'vitest'; -import { env, SELF } from 'cloudflare:test'; +import { SELF } from 'cloudflare:test'; describe('Worker integration', () => { it('responds to health check', async () => { @@ -32,59 +32,22 @@ describe('Worker integration', () => { expect(res.status).toBe(404); }); - it('check endpoint returns exists:false for empty DB', async () => { - const res = await SELF.fetch('https://media-kit.test/api/uploads/check', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ size: 1024, ext: '.png' }), - }); - expect(res.status).toBe(200); - const body = await res.json() as any; - expect(body.exists).toBe(false); - }); - - it('presign endpoint returns sessionId and presignedUrl', async () => { - const res = await SELF.fetch('https://media-kit.test/api/uploads/presign', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - originalname: 'test.png', - mimetype: 'image/png', - size: 1024, - ext: '.png', - }), - }); - expect(res.status).toBe(200); - const body = await res.json() as any; - expect(body).toHaveProperty('sessionId'); - // presignedUrl may fail without real R2 credentials, but sessionId should exist - expect(typeof body.sessionId).toBe('string'); - }); - - it('list uploads returns paginated response', async () 
=> { + it('auth-protected routes require authentication (no AUTH_SERVICE returns 503)', async () => { + // Without AUTH_SERVICE binding, auth middleware returns 503 const res = await SELF.fetch('https://media-kit.test/api/uploads'); - expect(res.status).toBe(200); - const body = await res.json() as any; - expect(body).toHaveProperty('uploads'); - expect(body).toHaveProperty('folders'); - expect(body).toHaveProperty('total'); - expect(body).toHaveProperty('page'); - expect(body).toHaveProperty('pageSize'); + expect([401, 503]).toContain(res.status); }); - it('auth middleware sets default DID', async () => { - const res = await SELF.fetch('https://media-kit.test/api/uploads'); + it('returns __blocklet__.js with app metadata', async () => { + const res = await SELF.fetch('https://media-kit.test/__blocklet__.js?type=json'); expect(res.status).toBe(200); - // If auth failed, we'd get 401/403 + const body = await res.json() as any; + expect(body).toHaveProperty('appName'); + expect(body.cloudflareWorker).toBe(true); }); - it('admin routes reject non-admin users', async () => { - // Default DID is in ADMIN_DIDS, so it IS admin - // A different DID should be rejected - const res = await SELF.fetch('https://media-kit.test/api/uploads/some-id', { - method: 'DELETE', - headers: { 'x-user-did': 'did:abt:non-admin-user' }, - }); - expect(res.status).toBe(403); + it('DID auth proxy returns 503 without AUTH_SERVICE', async () => { + const res = await SELF.fetch('https://media-kit.test/api/did/session'); + expect(res.status).toBe(503); }); }); diff --git a/cloudflare/src/middleware/auth.ts b/cloudflare/src/middleware/auth.ts index e370f5d..a448347 100644 --- a/cloudflare/src/middleware/auth.ts +++ b/cloudflare/src/middleware/auth.ts @@ -1,24 +1,101 @@ import { Context, Next } from 'hono'; -import type { HonoEnv } from '../types'; +import type { HonoEnv, CallerIdentityDTO } from '../types'; -const DEFAULT_DID = 'did:abt:default-uploader'; +// === JWT identity cache — avoid repeated 
AUTH_SERVICE RPC for the same token === +const JWT_CACHE_MAX_SIZE = 1000; +const JWT_CACHE_DEFAULT_TTL_MS = 5 * 60 * 1000; // 5 minutes fallback +const jwtIdentityCache = new Map(); + +function getJwtExpiry(jwt: string): number | null { + try { + const parts = jwt.split('.'); + if (parts.length !== 3) return null; + const payload = parts[1].replace(/-/g, '+').replace(/_/g, '/'); + const decoded = JSON.parse(atob(payload)); + if (typeof decoded.exp === 'number') { + return decoded.exp * 1000; + } + } catch { + // Fall through — use default TTL + } + return null; +} + +function getCachedIdentity(key: string): CallerIdentityDTO | null { + const entry = jwtIdentityCache.get(key); + if (!entry) return null; + if (Date.now() >= entry.expiresAt) { + jwtIdentityCache.delete(key); + return null; + } + return entry.identity; +} + +function cacheIdentity(key: string, identity: CallerIdentityDTO): void { + if (jwtIdentityCache.size >= JWT_CACHE_MAX_SIZE) { + const firstKey = jwtIdentityCache.keys().next().value; + if (firstKey) jwtIdentityCache.delete(firstKey); + } + const expiresAt = getJwtExpiry(key) ?? Date.now() + JWT_CACHE_DEFAULT_TTL_MS; + jwtIdentityCache.set(key, { identity, expiresAt }); +} + +// Paths that must bypass auth (login/session/logout are handled by DID auth proxy) +const AUTH_BYPASS_PREFIXES = ['/api/did/', '/api/uploader/']; /** - * Auth middleware. - * - * TODO: replace with shijun's CF auth SDK. - * When auth is ready, upload flow should go through Worker (not presigned URL direct upload) - * to ensure every request is authenticated. + * Auth middleware — resolves caller identity via AUTH_SERVICE RPC (Service Binding to DID service). + * Extracts JWT from login_token cookie or Authorization header, calls resolveIdentity, + * and sets user context for downstream handlers. 
*/ export async function authMiddleware(c: Context, next: Next) { - const userId = c.req.header('x-user-did') || DEFAULT_DID; - const adminDids = (c.env.ADMIN_DIDS || DEFAULT_DID).split(',').map((s) => s.trim()); - const isAdmin = adminDids.includes(userId); - - c.set('user', { - id: userId, - role: isAdmin ? 'admin' : 'member', - }); + // Skip auth for DID login/session routes (they're proxied to AUTH_SERVICE directly) + const path = new URL(c.req.url).pathname; + if (AUTH_BYPASS_PREFIXES.some((p) => path.startsWith(p))) { + return next(); + } + + const authService = c.env.AUTH_SERVICE; + if (!authService || typeof authService.resolveIdentity !== 'function') { + // AUTH_SERVICE not configured — reject request + return c.json({ error: 'Authentication service not available' }, 503); + } + + try { + const cookieHeader = c.req.header('Cookie') || ''; + const match = cookieHeader.match(/(?:^|;\s*)login_token=([^;]*)/); + const jwt = match ? decodeURIComponent(match[1]) : null; + const authHeader = c.req.header('Authorization') || null; + + // Try cache first + const cacheKey = jwt || authHeader; + let caller: CallerIdentityDTO | null = null; + if (cacheKey) { + caller = getCachedIdentity(cacheKey); + } + + if (!caller) { + caller = await authService.resolveIdentity(jwt, authHeader, c.env.APP_PID); + if (caller && cacheKey) { + cacheIdentity(cacheKey, caller); + } + } + + if (!caller) { + return c.json({ error: 'Unauthorized' }, 401); + } + + // Map caller identity to UserContext — role comes entirely from AUTH_SERVICE + const role = caller.role === 'owner' || caller.role === 'admin' ? 
'admin' : caller.role || 'member'; + + c.set('user', { + id: caller.did, + role, + }); + } catch (e: any) { + console.error('[Auth] resolveIdentity error:', e?.message || e); + return c.json({ error: 'Authentication failed' }, 401); + } return next(); } diff --git a/cloudflare/src/routes/status.ts b/cloudflare/src/routes/status.ts index 7b98697..7b8638e 100644 --- a/cloudflare/src/routes/status.ts +++ b/cloudflare/src/routes/status.ts @@ -22,6 +22,7 @@ statusRoutes.get('/uploader/status', async (c) => { }, availablePluginMap: { Uploaded: true, + Resources: false, ...(isUnsplashEnabled ? { Unsplash: true } : {}), ...(isAiImageEnabled ? { AIImage: true } : {}), }, diff --git a/cloudflare/src/types.ts b/cloudflare/src/types.ts index 8af69fd..088ef3d 100644 --- a/cloudflare/src/types.ts +++ b/cloudflare/src/types.ts @@ -1,5 +1,16 @@ import type { DrizzleD1Database } from 'drizzle-orm/d1'; +export interface CallerIdentityDTO { + did: string; + pk: string; + displayName: string; + avatar: string; + role: 'owner' | 'admin' | 'member' | 'guest'; + authMethod: 'passkey' | 'did-connect' | 'access-key' | 'oauth' | 'email'; + accessKeyId?: string; + approved: boolean; +} + export interface Env { // D1 Database DB: D1Database; @@ -16,7 +27,29 @@ export interface Env { MAX_UPLOAD_SIZE: string; ALLOWED_FILE_TYPES: string; USE_AI_IMAGE: string; - ADMIN_DIDS: string; + // App identity + APP_SK: string; // 64-byte hex secret key — used to register & derive instance DID + APP_NAME: string; + APP_PID: string; // Derived from APP_SK after registerApp; can also be set explicitly + APP_PREFIX: string; // Mount prefix (e.g. 
'/media-kit') — empty or '/' means root + // Auth Service (DID Connect via Service Binding) + AUTH_SERVICE: { + fetch: (request: Request | string) => Promise; + resolveIdentity: ( + jwt: string | null, + authorizationHeader: string | null, + instanceDid?: string + ) => Promise; + verify: (jwt: string) => Promise; + verifyFull: (jwt: string) => Promise; + registerApp: (config: { + instanceDid: string; + appSk: string; + appPsk?: string; + appName?: string; + appDescription?: string; + }) => Promise<{ instanceDid: string }>; + }; // Unsplash UNSPLASH_KEY: string; UNSPLASH_SECRET: string; @@ -34,7 +67,7 @@ export interface ConfirmQueueMessage { export interface UserContext { id: string; - role: 'admin' | 'member'; + role: 'owner' | 'admin' | 'member' | 'guest'; } // Hono env bindings diff --git a/cloudflare/src/worker.ts b/cloudflare/src/worker.ts index ba999c9..b1648d4 100644 --- a/cloudflare/src/worker.ts +++ b/cloudflare/src/worker.ts @@ -12,9 +12,70 @@ import { cleanupExpiredSessions } from './routes/cleanup'; const app = new Hono(); -// Global middleware -// TODO: restrict CORS origin to actual deployment domain when auth is implemented -app.use('*', cors()); +// Prefix strip middleware — allows mounting at a sub-path (e.g. /media-kit/) +// When APP_PREFIX is set, requests to /media-kit/* are internally rewritten to /* +// and X-Mount-Prefix is set so __blocklet__.js and HTML rewriting work correctly. +app.use('*', async (c, next) => { + const prefix = c.env.APP_PREFIX; + if (!prefix || prefix === '/') return next(); + + const pfx = prefix.endsWith('/') ? 
prefix.slice(0, -1) : prefix; + const url = new URL(c.req.url); + if (url.pathname.startsWith(pfx + '/') || url.pathname === pfx) { + // Strip prefix and rewrite URL + const newPath = url.pathname.slice(pfx.length) || '/'; + url.pathname = newPath; + const newReq = new Request(url.toString(), c.req.raw); + newReq.headers.set('X-Mount-Prefix', pfx + '/'); + // Replace the raw request so downstream sees the stripped path + return app.fetch(newReq, c.env); + } + + return next(); +}); + +// Root path redirect: logged in → /media-kit/admin, not logged in → login +// /.well-known/service/* is global (no prefix) — it's the auth service +app.get('/', async (c) => { + const pfx = c.env.APP_PREFIX || ''; + const loginUrl = '/.well-known/service/login'; + const adminUrl = `${pfx}/admin`; + + const authService = c.env.AUTH_SERVICE; + if (!authService || typeof authService.resolveIdentity !== 'function') { + return c.redirect(loginUrl); + } + + const cookieHeader = c.req.header('Cookie') || ''; + const match = cookieHeader.match(/(?:^|;\s*)login_token=([^;]*)/); + const jwt = match ? decodeURIComponent(match[1]) : null; + if (!jwt) { + return c.redirect(loginUrl); + } + + try { + const caller = await authService.resolveIdentity(jwt, null, c.env.APP_PID); + if (caller) { + return c.redirect(adminUrl); + } + } catch {} + + return c.redirect(loginUrl); +}); + +// Global middleware — CORS restricted to deployment origin +app.use( + '*', + cors({ + origin: (origin) => { + // Auth is enforced by AUTH_SERVICE — CORS allows same-origin requests with credentials + return origin || ''; + }, + credentials: true, + allowHeaders: ['Content-Type', 'Authorization'], + allowMethods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'], + }) +); // TODO: D1 write consistency — When Drizzle ORM adds support for D1's withSession API, // wrap write-path requests with `c.env.DB.withSession("first-primary")` to ensure // read-after-write consistency. 
Without this, D1 replicas may serve stale reads @@ -25,14 +86,170 @@ app.use('*', async (c, next) => { return next(); }); +// Auto-register instance in DID service on first request +let registeredInstanceDid: string | null = null; + +async function ensureRegistered(env: Env): Promise { + if (registeredInstanceDid) return registeredInstanceDid; + if (!env.AUTH_SERVICE || !env.APP_SK) { + return env.APP_PID || ''; + } + try { + const result = await env.AUTH_SERVICE.registerApp({ + instanceDid: 'auto', + appSk: env.APP_SK, + appName: env.APP_NAME || 'Media Kit', + appDescription: 'Media asset management', + }); + registeredInstanceDid = result.instanceDid; + console.log(`[media-kit] Registered as instance: ${registeredInstanceDid}`); + return registeredInstanceDid; + } catch (e: any) { + console.error('[media-kit] registerApp failed:', e?.message || e); + return env.APP_PID || ''; + } +} + +// Resolve instance DID on every request (cached after first call) +app.use('*', async (c, next) => { + const instanceDid = await ensureRegistered(c.env); + if (instanceDid) { + // Override APP_PID with the derived instance DID + (c.env as any).APP_PID = instanceDid; + } + return next(); +}); + +// DID Auth login/session routes — proxy to AUTH_SERVICE (blocklet-service) +const DID_AUTH_PROXY_PATHS = [ + '/api/did/login/', + '/api/did/session', + '/api/did/refreshSession', + '/api/did/connect/', + '/api/did/logout', +]; + +app.all('/api/did/*', async (c) => { + const path = new URL(c.req.url).pathname; + const shouldProxy = DID_AUTH_PROXY_PATHS.some((p) => path.startsWith(p) || path === p); + if (!shouldProxy) { + return c.json({ error: 'Not Found' }, 404); + } + if (!c.env.AUTH_SERVICE) { + return c.json({ error: 'AUTH_SERVICE not configured' }, 503); + } + const url = new URL(c.req.url); + url.pathname = `/.well-known/service${url.pathname}`; + const req = new Request(url.toString(), c.req.raw); + if (c.env.APP_PID) { + req.headers.set('X-Instance-Did', c.env.APP_PID); + } + const 
resp = await c.env.AUTH_SERVICE.fetch(req); + return new Response(resp.body, { status: resp.status, statusText: resp.statusText, headers: new Headers(resp.headers) }); +}); + +// Proxy all /.well-known/service/* to AUTH_SERVICE (login page, session API, admin, etc.) +app.all('/.well-known/service/*', async (c) => { + if (!c.env.AUTH_SERVICE) { + return c.json({ error: 'AUTH_SERVICE not configured' }, 503); + } + const req = new Request(c.req.url, c.req.raw); + if (c.env.APP_PID) { + req.headers.set('X-Instance-Did', c.env.APP_PID); + req.headers.set('X-Arc-Domain', new URL(c.req.url).host); + } + const resp = await c.env.AUTH_SERVICE.fetch(req); + return new Response(resp.body, { status: resp.status, statusText: resp.statusText, headers: new Headers(resp.headers) }); +}); + +// Media Kit component DID (used by uploader to detect media-kit) +const MEDIA_KIT_COMPONENT_DID = 'z8ia1mAXo8ZE7ytGF36L5uBf9kD2kenhqFGp9'; + +// __blocklet__.js — app metadata for frontend SessionProvider +app.get('/__blocklet__.js', async (c) => { + const isJson = new URL(c.req.url).searchParams.get('type') === 'json'; + const requestOrigin = new URL(c.req.url).origin; + const mountPrefix = c.req.header('X-Mount-Prefix') || '/'; + const defaultPreferences = { + extsInput: c.env.ALLOWED_FILE_TYPES || '.jpeg,.png,.gif,.svg,.webp,.bmp,.ico', + maxUploadSize: c.env.MAX_UPLOAD_SIZE || '500MB', + useAiImage: c.env.USE_AI_IMAGE === 'true', + }; + const data: Record<string, unknown> = { + appPid: c.env.APP_PID || '', + appName: c.env.APP_NAME || 'Media Kit', + appUrl: requestOrigin, + prefix: mountPrefix, + groupPrefix: mountPrefix, + cloudflareWorker: true, + inCFWorkers: true, + componentId: MEDIA_KIT_COMPONENT_DID, + preferences: defaultPreferences, + componentMountPoints: [{ + title: 'Media Kit', + name: 'image-bin', + did: MEDIA_KIT_COMPONENT_DID, + version: '1.0.0', + status: 'running', + mountPoint: mountPrefix, + }], + }; + + // Merge auth service metadata (appPid, appUrl, DID, theme, etc.)
+ if (c.env.AUTH_SERVICE) { + try { + const url = new URL(c.req.url); + url.pathname = '/__blocklet__.js'; + url.searchParams.set('type', 'json'); + const blockletReq = new Request(url.toString(), c.req.raw); + if (c.env.APP_PID) blockletReq.headers.set('X-Instance-Did', c.env.APP_PID); + const resp = await c.env.AUTH_SERVICE.fetch(blockletReq); + if (resp.ok) { + const authData = (await resp.json()) as Record<string, unknown>; + const authPreferences = authData.preferences as Record<string, unknown> | undefined; + Object.assign(data, authData, { + appName: c.env.APP_NAME || authData.appName || 'Media Kit', + appUrl: requestOrigin, + prefix: mountPrefix, + groupPrefix: mountPrefix, + cloudflareWorker: true, + inCFWorkers: true, + componentId: MEDIA_KIT_COMPONENT_DID, + preferences: { ...defaultPreferences, ...authPreferences }, + // Ensure media-kit component is always present + componentMountPoints: [ + ...((authData.componentMountPoints as any[]) || []), + { + title: 'Media Kit', + name: 'image-bin', + did: MEDIA_KIT_COMPONENT_DID, + version: '1.0.0', + status: 'running', + mountPoint: mountPrefix, + }, + ], + }); + } + } catch (e: any) { + console.error('[__blocklet__.js] AUTH_SERVICE fetch error:', e?.message || e); + } + } + + if (isJson) { + return c.json(data); + } + return c.text(`window.blocklet = ${JSON.stringify(data)};`, 200, { + 'Content-Type': 'application/javascript', + }); +}); + // File serving (no auth required for public files, EXIF stripped) app.route('/', fileServingRoutes); // Status endpoint (no auth) app.route('/api', statusRoutes); -// Auth-protected routes -// TODO: replace x-user-did header auth with shijun's CF auth SDK when ready +// Auth-protected routes (via AUTH_SERVICE RPC) app.use('/api/*', authMiddleware); app.route('/api', uploadRoutes); app.route('/api', folderRoutes); @@ -115,18 +332,55 @@ app.post('/api/image/generations', async (c) => { // Health check app.get('/health', (c) => c.json({ status: 'ok', version: '1.0.0' })); -// SPA fallback — non-API routes
return index.html for client-side routing +// SPA fallback — non-API routes return index.html with prefix-aware asset rewriting app.notFound(async (c) => { const path = new URL(c.req.url).pathname; if (path.startsWith('/api/') || path.startsWith('/health')) { return c.json({ error: 'Not Found' }, 404); } + + const assets = (c.env as any).ASSETS; + if (!assets) { + return c.json({ error: 'Not Found' }, 404); + } + + // Rewrite HTML for mount prefix support + inject __blocklet__.js + const rewriteHtml = async (htmlResponse: Response) => { + if (!htmlResponse.headers.get('content-type')?.includes('text/html')) return htmlResponse; + let html = await htmlResponse.text(); + const mountPrefix = c.req.header('X-Mount-Prefix'); + if (mountPrefix && mountPrefix !== '/') { + const pfx = mountPrefix.endsWith('/') ? mountPrefix.slice(0, -1) : mountPrefix; + // Rewrite all absolute asset/src paths in HTML attributes + html = html.replace(/((?:src|href)=["'])\/(assets|src)\//g, `$1${pfx}/$2/`); + // Rewrite __blocklet__.js script tag + html = html.replace(/(<script[^>]*src=["'])\/__blocklet__\.js(["'])/g, `$1${pfx}/__blocklet__.js$2`); + } + return new Response(html, { + status: htmlResponse.status, + headers: { ...Object.fromEntries(htmlResponse.headers.entries()), 'Cache-Control': 'no-cache' }, + }); + }; + try { - const assets = (c.env as any).ASSETS; - if (assets) { - return assets.fetch(new Request(new URL('/', c.req.url))); + // Try exact asset first + const assetResponse = await assets.fetch(c.req.raw); + if (assetResponse.status !== 404) { + if (assetResponse.headers.get('content-type')?.includes('text/html')) { + return rewriteHtml(assetResponse); + } + return assetResponse; } } catch {} + + // Fall back to index.html for SPA routing + try { + const url = new URL(c.req.url); + url.pathname = '/index.html'; + const htmlResponse = await assets.fetch(new Request(url.toString(), c.req.raw)); + return rewriteHtml(htmlResponse); + } catch {} + return c.json({ error: 'Not Found' },
404); }); diff --git a/cloudflare/wrangler.toml b/cloudflare/wrangler.toml index 13bbf28..f4305d2 100644 --- a/cloudflare/wrangler.toml +++ b/cloudflare/wrangler.toml @@ -6,7 +6,8 @@ compatibility_flags = ["nodejs_compat"] # Serve frontend static assets (vite build output) [assets] directory = "./public" -not_found_handling = "single-page-application" +not_found_handling = "none" +html_handling = "none" binding = "ASSETS" [vars] @@ -14,9 +15,16 @@ ENVIRONMENT = "production" MAX_UPLOAD_SIZE = "500MB" ALLOWED_FILE_TYPES = ".jpeg,.png,.gif,.svg,.webp,.bmp,.ico" USE_AI_IMAGE = "true" -ADMIN_DIDS = "did:abt:default-uploader" +APP_NAME = "Media Kit" +APP_PREFIX = "/media-kit" AIGNE_HUB_URL = "https://hub.aigne.io" +# Service Binding to DID Connect Auth Worker (blocklet-service) +[[services]] +binding = "AUTH_SERVICE" +service = "blocklet-service" +entrypoint = "BlockletServiceRPC" + [[r2_buckets]] binding = "R2_UPLOADS" bucket_name = "media-kit-uploads" @@ -31,6 +39,7 @@ migrations_dir = "migrations" crons = ["0 * * * *"] # Secrets (wrangler secret put): +# APP_SK (64-byte hex secret key — used to auto-register instance in DID service) # R2_ACCESS_KEY_ID, R2_SECRET_ACCESS_KEY # R2_ORIGIN_DOMAIN (R2 public domain, restricted by IP Access Rule) # UNSPLASH_KEY, UNSPLASH_SECRET @@ -42,4 +51,4 @@ crons = ["0 * * * *"] name = "media-kit-staging" [env.staging.vars] ENVIRONMENT = "staging" -ADMIN_DIDS = "did:abt:default-uploader" +APP_NAME = "Media Kit (Staging)" From b5e1d8db3b272a3cdedadda3cdde1ce03e1b3260 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 10:39:12 +0800 Subject: [PATCH 13/21] fix(cloudflare): redirect to login instead of showing 403 for unauthenticated users --- .../src/shims/did-connect-react-session.tsx | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/cloudflare/frontend/src/shims/did-connect-react-session.tsx b/cloudflare/frontend/src/shims/did-connect-react-session.tsx index 
7b1c53f..81a9b32 100644 --- a/cloudflare/frontend/src/shims/did-connect-react-session.tsx +++ b/cloudflare/frontend/src/shims/did-connect-react-session.tsx @@ -44,19 +44,15 @@ function SessionProvider({ const user = data?.user || null; setSession({ user, loading: false, error: null }); - // Redirect to login if on a protected route and not authenticated - if (!user && protectedRoutes?.length) { - const path = window.location.pathname; - const isProtected = protectedRoutes.some( - (r) => r.endsWith('*') ? path.startsWith(r.slice(0, -1)) : path === r - ); - if (isProtected) { - const returnUrl = encodeURIComponent(path + window.location.search); - window.location.href = `/.well-known/service/login?return=${returnUrl}`; - } + // Not authenticated — redirect to login + if (!user) { + const returnUrl = encodeURIComponent(window.location.pathname + window.location.search); + window.location.href = `/.well-known/service/login?return=${returnUrl}`; } } catch { - setSession({ user: null, loading: false, error: null }); + // Session fetch failed — redirect to login + const returnUrl = encodeURIComponent(window.location.pathname + window.location.search); + window.location.href = `/.well-known/service/login?return=${returnUrl}`; } }, [protectedRoutes]); From b341c159c4034a03b5d71efa0363033cdf210c23 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 10:40:36 +0800 Subject: [PATCH 14/21] docs(cloudflare): update README with auth integration, prefix support and merge checklist --- cloudflare/README.md | 104 ++++++++++++++++++++++++++++++++++++++----- 1 file changed, 92 insertions(+), 12 deletions(-) diff --git a/cloudflare/README.md b/cloudflare/README.md index 3c4121a..a4a368b 100644 --- a/cloudflare/README.md +++ b/cloudflare/README.md @@ -6,6 +6,7 @@ - pnpm - Cloudflare 账号(免费即可) - Wrangler CLI:`npm install -g wrangler` +- DID service(blocklet-service)已部署为 CF Worker ## 一次性初始化(首次部署) @@ -60,6 +61,10 @@ Dashboard → R2 → 管理 R2 API 令牌 → 创建 API 令牌: 
```bash cd cloudflare +# APP_SK — 用于在 DID service 中注册 instance(自动派生 instance DID) +# 生成方法:node -e "console.log(require('crypto').randomBytes(32).toString('hex'))" +wrangler secret put APP_SK + # R2 凭证 wrangler secret put R2_ACCESS_KEY_ID wrangler secret put R2_SECRET_ACCESS_KEY @@ -69,8 +74,6 @@ wrangler secret put CF_ACCOUNT_ID # AIGNE Hub(AI Image 功能需要) wrangler secret put AIGNE_HUB_API_KEY -# 可选:覆盖默认 AIGNE Hub URL(默认 https://hub.aigne.io) -# wrangler secret put AIGNE_HUB_URL # 可选:Unsplash API wrangler secret put UNSPLASH_KEY @@ -100,6 +103,76 @@ cd cloudflare && wrangler deploy # 部署 Worker + 静态资源 部署后访问 `https://media-kit..workers.dev` +## 认证架构 + +Media Kit 通过 CF Service Binding 对接 DID service(blocklet-service)进行用户认证。 + +``` +用户请求(带 login_token cookie) + ↓ +media-kit Worker + ↓ AUTH_SERVICE.resolveIdentity(jwt, authHeader, instanceDid) +blocklet-service Worker(Service Binding,零网络延迟) + ↓ 返回 { did, role, displayName } +media-kit 设置 user context → 处理请求 +``` + +### 关键配置 + +```toml +# wrangler.toml + +# Service Binding 到 DID service +[[services]] +binding = "AUTH_SERVICE" +service = "blocklet-service" +entrypoint = "BlockletServiceRPC" +``` + +### 自动注册 + +Worker 首次启动时通过 `AUTH_SERVICE.registerApp()` 自动注册 instance: +- 从 `APP_SK`(secret)派生 instance DID +- 无需手动配置 `APP_PID` — 自动从 APP_SK 生成 + +### 认证流程 + +1. 用户访问 `/` → 未登录时 302 到 `/.well-known/service/login` +2. DID service 提供登录页(passkey / wallet / email) +3. 登录成功 → 设置 `login_token` cookie → 重定向回 media-kit +4. media-kit auth middleware 从 cookie 提取 JWT → 调用 `AUTH_SERVICE.resolveIdentity()` 验证 +5. 
JWT 验证结果缓存 5 分钟(避免重复 RPC 调用) + +### 路由代理 + +| 路径 | 处理方式 | +|------|---------| +| `/.well-known/service/*` | 代理到 AUTH_SERVICE(登录页、session API、管理后台) | +| `/api/did/*` | 代理到 AUTH_SERVICE(login/session/logout) | +| `/__blocklet__.js` | Worker 生成(合并 AUTH_SERVICE 元数据) | +| `/api/uploader/status` | 无需认证 — 返回 uploader 配置 | +| `/api/*`(其他) | 需要认证 | + +## Prefix 支持 + +Media Kit 支持挂载在子路径下运行,通过 `APP_PREFIX` 环境变量配置: + +```toml +# wrangler.toml +[vars] +APP_PREFIX = "/media-kit" # 可改为任意路径,留空或 "/" 表示根路径 +``` + +配置后: +- 访问 `/media-kit/admin` → media-kit 管理页面 +- 访问 `/media-kit/api/*` → media-kit API +- 访问 `/media-kit/__blocklet__.js` → 返回正确的 prefix 配置 +- 访问 `/` → 自动重定向到 `/media-kit/admin`(已登录)或登录页(未登录) + +Prefix 也支持通过 gateway Worker 的 `X-Mount-Prefix` header 动态设置。 + +**注意**:`/.well-known/service/*` 是全局认证服务路径,不加 prefix。 + ## 数据迁移(从 Blocklet Server 迁移) 如果需要从现有 Blocklet Server 迁移数据: @@ -166,6 +239,7 @@ cd cloudflare # 创建 .dev.vars 文件 cat > .dev.vars << 'EOF' ENVIRONMENT=development +APP_SK=<64-byte-hex-secret-key> R2_ACCESS_KEY_ID=your-key R2_SECRET_ACCESS_KEY=your-secret CF_ACCOUNT_ID=your-account-id @@ -187,33 +261,34 @@ cd frontend && npx vite --port 3030 ``` cloudflare/ src/ # CF Worker 后端(Hono + D1 + R2) - worker.ts # 入口:路由 + AIGNE Hub 代理 + worker.ts # 入口:prefix strip、root redirect、auth proxy、路由、SPA fallback routes/ - upload.ts # 上传:presign / proxy-put(开发模式代理上传) / direct(FormData 上传) / confirm + upload.ts # 上传:presign / proxy-put / direct / confirm serve.ts # 文件服务:R2 → 响应(生产用 cf.image) folders.ts # 文件夹 CRUD - status.ts # Uploader 配置 + status.ts # Uploader 配置(uploadMode: presigned) unsplash.ts # Unsplash 代理 - cleanup.ts # 定时清理过期 session + AI 临时图片(tmp/ai/,24h) - middleware/auth.ts # x-user-did 认证 + cleanup.ts # 定时清理过期 session + AI 临时图片 + middleware/auth.ts # AUTH_SERVICE RPC 认证 + JWT 缓存 db/schema.ts # Drizzle ORM 表定义 + types.ts # Env、CallerIdentityDTO、UserContext 类型 utils/ s3.ts # R2 S3 兼容 API(presigned URL、multipart) hash.ts # MD5 哈希、MIME 检测、SVG 净化 frontend/ # 前端构建配置 
vite.config.ts # Alias 指向原版源码 + shim src/shims/ # Blocklet SDK 替代实现 - index.html # window.blocklet 注入 + index.html # window.blocklet 默认值 + __blocklet__.js 加载 public/ # vite build 产物(Worker 静态资源) - wrangler.toml # CF Workers 配置 + wrangler.toml # CF Workers 配置(Service Binding、prefix 等) migrations/ # D1 数据库迁移 scripts/migrate-data.ts # SQLite → D1 迁移脚本 ``` 前端源码复用 `blocklets/image-bin/src/`,通过 Vite alias 将依赖 Blocklet Server 运行时的包替换为 shim: -- `@blocklet/js-sdk` → createAxios shim(标准 axios) -- `@blocklet/ui-react` → Dashboard/Header/Footer/ComponentInstaller shim -- `@arcblock/did-connect-react` → SessionProvider/ConnectButton shim +- `@blocklet/js-sdk` → createAxios shim(axios + withCredentials) +- `@blocklet/ui-react` → Dashboard(含 Header)/ Header / Footer / ComponentInstaller shim +- `@arcblock/did-connect-react` → SessionProvider(真实 DID Connect session via cookie)/ ConnectButton shim `@arcblock/ux` 和 `@arcblock/did` 直接使用原包(纯 UI 组件,无 Blocklet Server 依赖)。 @@ -221,8 +296,13 @@ cloudflare/ | 特性 | 本地开发 | 线上 | |------|---------|------| +| 认证 | AUTH_SERVICE Service Binding(需本地运行 blocklet-service) | AUTH_SERVICE Service Binding | | R2 存储 | miniflare 本地模拟 | 真实 R2 | | D1 数据库 | 本地 SQLite | 真实 D1 | | Presigned URL | proxy-put 代理(避免 CORS) | 直传 R2(需配 CORS) | | 文件服务 | R2 binding 直接读取 | cf.image EXIF 剥离 + 自动 WebP | | AI 图片生成 | 代理到 hub.aigne.io,临时缓存到 R2 tmp/ai/ | 同左,cron 每小时清理 24h 前的临时文件 | + +## Merge 前待完成 + +- [ ] Production optimization: S3 CopyObject for large file dedup(独立于认证,可随时做) From fe28a923d3c3b588c464989a9ec256050c135ee1 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 10:42:03 +0800 Subject: [PATCH 15/21] docs(cloudflare): remove unnecessary S3 CopyObject TODO --- cloudflare/README.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/cloudflare/README.md b/cloudflare/README.md index a4a368b..27fd7ce 100644 --- a/cloudflare/README.md +++ b/cloudflare/README.md @@ -302,7 +302,3 @@ cloudflare/ | Presigned URL | proxy-put 代理(避免 CORS) | 直传 
R2(需配 CORS) | | 文件服务 | R2 binding 直接读取 | cf.image EXIF 剥离 + 自动 WebP | | AI 图片生成 | 代理到 hub.aigne.io,临时缓存到 R2 tmp/ai/ | 同左,cron 每小时清理 24h 前的临时文件 | - -## Merge 前待完成 - -- [ ] Production optimization: S3 CopyObject for large file dedup(独立于认证,可随时做) From 0c5261dbc4fe0916cebf74dad3682159c4152a4a Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 10:55:41 +0800 Subject: [PATCH 16/21] fix(cloudflare): prevent double-slash in redirect when APP_PREFIX is root --- cloudflare/src/worker.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cloudflare/src/worker.ts b/cloudflare/src/worker.ts index b1648d4..eb03d01 100644 --- a/cloudflare/src/worker.ts +++ b/cloudflare/src/worker.ts @@ -37,7 +37,8 @@ app.use('*', async (c, next) => { // Root path redirect: logged in → /media-kit/admin, not logged in → login // /.well-known/service/* is global (no prefix) — it's the auth service app.get('/', async (c) => { - const pfx = c.env.APP_PREFIX || ''; + const raw = c.env.APP_PREFIX || '/'; + const pfx = raw === '/' ? 
'' : raw.replace(/\/$/, ''); const loginUrl = '/.well-known/service/login'; const adminUrl = `${pfx}/admin`; From 7e99e7b595edb30a8a47f3668033829486253176 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 10:56:37 +0800 Subject: [PATCH 17/21] chore(cloudflare): remove APP_PREFIX from wrangler.toml, set via env instead --- cloudflare/wrangler.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/cloudflare/wrangler.toml b/cloudflare/wrangler.toml index f4305d2..36aeafc 100644 --- a/cloudflare/wrangler.toml +++ b/cloudflare/wrangler.toml @@ -16,7 +16,6 @@ MAX_UPLOAD_SIZE = "500MB" ALLOWED_FILE_TYPES = ".jpeg,.png,.gif,.svg,.webp,.bmp,.ico" USE_AI_IMAGE = "true" APP_NAME = "Media Kit" -APP_PREFIX = "/media-kit" AIGNE_HUB_URL = "https://hub.aigne.io" # Service Binding to DID Connect Auth Worker (blocklet-service) From 1d92b5f815f3213e7c940340221b6931b50274df Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 11:02:16 +0800 Subject: [PATCH 18/21] docs(cloudflare): update APP_PREFIX to use secret instead of vars --- cloudflare/README.md | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/cloudflare/README.md b/cloudflare/README.md index 27fd7ce..76afeae 100644 --- a/cloudflare/README.md +++ b/cloudflare/README.md @@ -75,6 +75,9 @@ wrangler secret put CF_ACCOUNT_ID # AIGNE Hub(AI Image 功能需要) wrangler secret put AIGNE_HUB_API_KEY +# 可选:挂载子路径(默认 /,即根路径) +# wrangler secret put APP_PREFIX # 例如 /media-kit + # 可选:Unsplash API wrangler secret put UNSPLASH_KEY wrangler secret put UNSPLASH_SECRET @@ -155,15 +158,13 @@ Worker 首次启动时通过 `AUTH_SERVICE.registerApp()` 自动注册 instance ## Prefix 支持 -Media Kit 支持挂载在子路径下运行,通过 `APP_PREFIX` 环境变量配置: +Media Kit 支持挂载在子路径下运行,通过 `APP_PREFIX` secret 配置: -```toml -# wrangler.toml -[vars] -APP_PREFIX = "/media-kit" # 可改为任意路径,留空或 "/" 表示根路径 +```bash +wrangler secret put APP_PREFIX # 输入如 /media-kit 或 /media-kit/ ``` -配置后: +不设置时默认为 `/`(根路径)。尾部斜杠自动 normalize。配置后: - 
访问 `/media-kit/admin` → media-kit 管理页面 - 访问 `/media-kit/api/*` → media-kit API - 访问 `/media-kit/__blocklet__.js` → 返回正确的 prefix 配置 From 5f5658d0c8ab4087ebe5f2fae7a1e0215764caba Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 11:03:24 +0800 Subject: [PATCH 19/21] docs(cloudflare): APP_PREFIX via dashboard vars, not secret --- cloudflare/README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/cloudflare/README.md b/cloudflare/README.md index 76afeae..e08ffc8 100644 --- a/cloudflare/README.md +++ b/cloudflare/README.md @@ -76,7 +76,7 @@ wrangler secret put CF_ACCOUNT_ID wrangler secret put AIGNE_HUB_API_KEY # 可选:挂载子路径(默认 /,即根路径) -# wrangler secret put APP_PREFIX # 例如 /media-kit +# 在 Dashboard → Workers → Settings → Variables 中设置 APP_PREFIX,例如 /media-kit # 可选:Unsplash API wrangler secret put UNSPLASH_KEY @@ -158,11 +158,9 @@ Worker 首次启动时通过 `AUTH_SERVICE.registerApp()` 自动注册 instance ## Prefix 支持 -Media Kit 支持挂载在子路径下运行,通过 `APP_PREFIX` secret 配置: +Media Kit 支持挂载在子路径下运行,通过 `APP_PREFIX` 环境变量配置: -```bash -wrangler secret put APP_PREFIX # 输入如 /media-kit 或 /media-kit/ -``` +在 Dashboard → Workers → media-kit → Settings → Variables 中添加 `APP_PREFIX`,值如 `/media-kit`。 不设置时默认为 `/`(根路径)。尾部斜杠自动 normalize。配置后: - 访问 `/media-kit/admin` → media-kit 管理页面 From 1c984571aadc8a1c059cad6d59b4df657d10e2a4 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 11:32:08 +0800 Subject: [PATCH 20/21] fix(cloudflare): address cross-review security and logic issues - CORS: restrict to same-origin only (was echoing any origin with credentials) - Prefix strip: add X-Prefix-Stripped header to prevent middleware double-execution - JWT cache: extract raw token from Bearer header for correct expiry parsing - Cleanup: don't skip AI temp image cleanup when no expired sessions exist - isAdminMiddleware: guard against undefined user context --- cloudflare/src/middleware/auth.ts | 8 ++++++-- 
cloudflare/src/routes/cleanup.ts | 2 -- cloudflare/src/worker.ts | 13 ++++++++----- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/cloudflare/src/middleware/auth.ts b/cloudflare/src/middleware/auth.ts index a448347..e02c48d 100644 --- a/cloudflare/src/middleware/auth.ts +++ b/cloudflare/src/middleware/auth.ts @@ -67,8 +67,9 @@ export async function authMiddleware(c: Context, next: Next) { const jwt = match ? decodeURIComponent(match[1]) : null; const authHeader = c.req.header('Authorization') || null; - // Try cache first - const cacheKey = jwt || authHeader; + // Try cache first — extract raw token from Bearer header for correct expiry parsing + const rawToken = authHeader?.startsWith('Bearer ') ? authHeader.slice(7) : authHeader; + const cacheKey = jwt || rawToken; let caller: CallerIdentityDTO | null = null; if (cacheKey) { caller = getCachedIdentity(cacheKey); @@ -102,6 +103,9 @@ export async function authMiddleware(c: Context, next: Next) { export async function isAdminMiddleware(c: Context, next: Next) { const user = c.get('user'); + if (!user) { + return c.json({ error: 'Unauthorized' }, 401); + } if (user.role !== 'admin') { return c.json({ error: 'Admin access required' }, 403); } diff --git a/cloudflare/src/routes/cleanup.ts b/cloudflare/src/routes/cleanup.ts index f237581..5786745 100644 --- a/cloudflare/src/routes/cleanup.ts +++ b/cloudflare/src/routes/cleanup.ts @@ -27,8 +27,6 @@ export async function cleanupExpiredSessions(env: Env): Promise { ), ); - if (expired.length === 0) return; - for (const session of expired) { if (session.uploadId && session.key) { // Multipart session — abort via R2 binding diff --git a/cloudflare/src/worker.ts b/cloudflare/src/worker.ts index eb03d01..bac89fc 100644 --- a/cloudflare/src/worker.ts +++ b/cloudflare/src/worker.ts @@ -16,18 +16,20 @@ const app = new Hono(); // When APP_PREFIX is set, requests to /media-kit/* are internally rewritten to /* // and X-Mount-Prefix is set so __blocklet__.js and HTML 
rewriting work correctly. app.use('*', async (c, next) => { + // Already stripped by a previous pass — skip + if (c.req.header('X-Prefix-Stripped')) return next(); + const prefix = c.env.APP_PREFIX; if (!prefix || prefix === '/') return next(); const pfx = prefix.endsWith('/') ? prefix.slice(0, -1) : prefix; const url = new URL(c.req.url); if (url.pathname.startsWith(pfx + '/') || url.pathname === pfx) { - // Strip prefix and rewrite URL const newPath = url.pathname.slice(pfx.length) || '/'; url.pathname = newPath; const newReq = new Request(url.toString(), c.req.raw); newReq.headers.set('X-Mount-Prefix', pfx + '/'); - // Replace the raw request so downstream sees the stripped path + newReq.headers.set('X-Prefix-Stripped', '1'); return app.fetch(newReq, c.env); } @@ -68,9 +70,10 @@ app.get('/', async (c) => { app.use( '*', cors({ - origin: (origin) => { - // Auth is enforced by AUTH_SERVICE — CORS allows same-origin requests with credentials - return origin || ''; + origin: (origin, c) => { + // Only allow same-origin — SPA is served from the same worker + const self = new URL(c.req.url).origin; + return origin === self ? 
origin : ''; }, credentials: true, allowHeaders: ['Content-Type', 'Authorization'], From 85b96c9b77055323a7e4da8a2aac3a1cbd9953d1 Mon Sep 17 00:00:00 2001 From: xiaomoziyi <823346486@qq.com> Date: Fri, 10 Apr 2026 11:42:53 +0800 Subject: [PATCH 21/21] chore(release): bump version to 0.15.0 and add Cloudflare Workers deployment for Media Kit --- CHANGELOG.md | 4 ++++ blocklets/image-bin/blocklet.yml | 2 +- blocklets/image-bin/package.json | 2 +- package.json | 2 +- packages/uploader-server/package.json | 2 +- packages/uploader/package.json | 2 +- packages/xss/package.json | 2 +- version | 2 +- 8 files changed, 11 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 752c79a..04ce1b9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.15.0 (2026-4-10) + +- feat: Cloudflare Workers deployment for Media Kit + ## 0.14.20 (2026-1-7) - feat: change bundle method diff --git a/blocklets/image-bin/blocklet.yml b/blocklets/image-bin/blocklet.yml index 3ab0745..f83db99 100644 --- a/blocklets/image-bin/blocklet.yml +++ b/blocklets/image-bin/blocklet.yml @@ -1,5 +1,5 @@ name: image-bin -version: 0.14.20 +version: 0.15.0 title: Media Kit description: >- Self-hosted media management that replaces expensive cloud services while diff --git a/blocklets/image-bin/package.json b/blocklets/image-bin/package.json index eac149e..83c632a 100644 --- a/blocklets/image-bin/package.json +++ b/blocklets/image-bin/package.json @@ -1,6 +1,6 @@ { "name": "image-bin", - "version": "0.14.20", + "version": "0.15.0", "private": true, "scripts": { "dev": "blocklet dev", diff --git a/package.json b/package.json index 297223e..d166089 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "image-bin", - "version": "0.14.20", + "version": "0.15.0", "private": true, "scripts": { "lint": "pnpm -r lint", diff --git a/packages/uploader-server/package.json b/packages/uploader-server/package.json index 7873e8c..3c3cec1 100644 --- 
a/packages/uploader-server/package.json +++ b/packages/uploader-server/package.json @@ -1,6 +1,6 @@ { "name": "@blocklet/uploader-server", - "version": "0.14.20", + "version": "0.15.0", "description": "blocklet upload server", "publishConfig": { "access": "public" diff --git a/packages/uploader/package.json b/packages/uploader/package.json index 0edf263..f8c613e 100644 --- a/packages/uploader/package.json +++ b/packages/uploader/package.json @@ -1,6 +1,6 @@ { "name": "@blocklet/uploader", - "version": "0.14.20", + "version": "0.15.0", "description": "blocklet upload component", "publishConfig": { "access": "public" diff --git a/packages/xss/package.json b/packages/xss/package.json index 1a2a0b5..a691013 100644 --- a/packages/xss/package.json +++ b/packages/xss/package.json @@ -1,6 +1,6 @@ { "name": "@blocklet/xss", - "version": "0.14.20", + "version": "0.15.0", "description": "blocklet prevent xss attack", "publishConfig": { "access": "public" diff --git a/version b/version index eaebb62..7092c7c 100644 --- a/version +++ b/version @@ -1 +1 @@ -0.14.20 \ No newline at end of file +0.15.0 \ No newline at end of file