feat(upload): enhance upload functionality with chunk management and cancellation support
- Updated Upload.vue to include cancelItem functionality in the upload queue. - Modified UploadQueue.vue to emit cancel events for individual items. - Enhanced UploadQueueItem.vue to display cancel button for ongoing uploads. - Added merge.ts for handling manifest creation and S3 operations for chunk uploads. - Introduced temp.html for testing multi-threaded chunk uploads with progress tracking. - Created AGENTS.md for comprehensive project documentation and guidelines.
This commit is contained in:
@@ -1,5 +1,5 @@
|
||||
import { tryGetContext } from "hono/context-storage";
|
||||
|
||||
export const baseAPIURL = "https://api.pipic.fun";
|
||||
export const customFetch = (url: string, options: RequestInit) => {
|
||||
options.credentials = "include";
|
||||
const c = tryGetContext<any>();
|
||||
@@ -21,7 +21,7 @@ export const customFetch = (url: string, options: RequestInit) => {
|
||||
...(options.headers as Record<string, string>),
|
||||
};
|
||||
|
||||
const apiUrl = ["https://api.pipic.fun", url.replace(/^r/, "")].join("");
|
||||
const apiUrl = [baseAPIURL, url.replace(/^r/, "")].join("");
|
||||
return fetch(apiUrl, options).then(async (res) => {
|
||||
res.headers.getSetCookie()?.forEach((cookie) => {
|
||||
c.header("Set-Cookie", cookie);
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
import { getContext } from "hono/context-storage";
|
||||
import { HonoVarTypes } from "types";
|
||||
|
||||
// We can keep checkAuth to return the current user profile from the context
|
||||
// which is populated by the firebaseAuthMiddleware
|
||||
async function checkAuth() {
|
||||
const context = getContext<HonoVarTypes>();
|
||||
const user = context.get('user');
|
||||
|
||||
if (!user) {
|
||||
return { authenticated: false, user: null };
|
||||
}
|
||||
|
||||
return {
|
||||
authenticated: true,
|
||||
user: user
|
||||
};
|
||||
}
|
||||
|
||||
export const authMethods = {
|
||||
checkAuth,
|
||||
};
|
||||
@@ -1 +0,0 @@
|
||||
// SECURITY(review): hard-coded secret committed to version control.
// Rotate this value and load it from an environment variable or a secret
// manager instead of shipping it in source.
export const secret = "123_it-is-very-secret_123";
|
||||
@@ -1,344 +0,0 @@
|
||||
import {
|
||||
exposeTinyRpc,
|
||||
httpServerAdapter,
|
||||
validateFn,
|
||||
} from "@hiogawa/tiny-rpc";
|
||||
import { tinyassert } from "@hiogawa/utils";
|
||||
import { MiddlewareHandler, type Context, type Next } from "hono";
|
||||
import { getContext } from "hono/context-storage";
|
||||
// import { adminAuth } from "../../lib/firebaseAdmin";
|
||||
import { z } from "zod";
|
||||
import { authMethods } from "./auth";
|
||||
import { abortChunk, chunkedUpload, completeChunk, createPresignedUrls, imageContentTypes, nanoid, presignedPut, videoContentTypes } from "./s3_handle";
|
||||
// import { createElement } from "react";
|
||||
|
||||
let counter = 0;
|
||||
const listCourses = [
|
||||
{
|
||||
id: 1,
|
||||
title: "Lập trình Web Fullstack",
|
||||
description:
|
||||
"Học cách xây dựng ứng dụng web hoàn chỉnh từ frontend đến backend. Khóa học bao gồm HTML, CSS, JavaScript, React, Node.js và MongoDB.",
|
||||
category: "Lập trình",
|
||||
rating: 4.9,
|
||||
price: "1.200.000 VNĐ",
|
||||
icon: "fas fa-code",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Web%20Fullstack",
|
||||
slug: "lap-trinh-web-fullstack",
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
title: "Phân tích dữ liệu với Python",
|
||||
description:
|
||||
"Khám phá sức mạnh của Python trong việc phân tích và trực quan hóa dữ liệu. Sử dụng Pandas, NumPy, Matplotlib và Seaborn.",
|
||||
category: "Phân tích dữ liệu",
|
||||
rating: 4.8,
|
||||
price: "900.000 VNĐ",
|
||||
icon: "fas fa-chart-bar",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Data%20Analysis",
|
||||
slug: "phan-tich-du-lieu-voi-python",
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
title: "Thiết kế UI/UX chuyên nghiệp",
|
||||
description:
|
||||
"Học các nguyên tắc thiết kế giao diện và trải nghiệm người dùng hiện đại. Sử dụng Figma và Adobe XD.",
|
||||
category: "Thiết kế",
|
||||
rating: 4.7,
|
||||
price: "800.000 VNĐ",
|
||||
icon: "fas fa-paint-brush",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=UI/UX%20Design",
|
||||
slug: "thiet-ke-ui-ux-chuyen-nghiep",
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
title: "Machine Learning cơ bản",
|
||||
description:
|
||||
"Nhập môn Machine Learning với Python. Tìm hiểu về các thuật toán học máy cơ bản như Linear Regression, Logistic Regression, Decision Trees.",
|
||||
category: "AI/ML",
|
||||
rating: 4.6,
|
||||
price: "1.500.000 VNĐ",
|
||||
icon: "fas fa-brain",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Machine%20Learning",
|
||||
slug: "machine-learning-co-ban",
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
title: "Digital Marketing toàn diện",
|
||||
description:
|
||||
"Chiến lược Marketing trên các nền tảng số. SEO, Google Ads, Facebook Ads và Content Marketing.",
|
||||
category: "Marketing",
|
||||
rating: 4.5,
|
||||
price: "700.000 VNĐ",
|
||||
icon: "fas fa-bullhorn",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Digital%20Marketing",
|
||||
slug: "digital-marketing-toan-dien",
|
||||
},
|
||||
{
|
||||
id: 6,
|
||||
title: "Lập trình Mobile với Flutter",
|
||||
description:
|
||||
"Xây dựng ứng dụng di động đa nền tảng (iOS & Android) với Flutter và Dart.",
|
||||
category: "Lập trình",
|
||||
rating: 4.8,
|
||||
price: "1.100.000 VNĐ",
|
||||
icon: "fas fa-mobile-alt",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Flutter%20Mobile",
|
||||
slug: "lap-trinh-mobile-voi-flutter",
|
||||
},
|
||||
{
|
||||
id: 7,
|
||||
title: "Tiếng Anh giao tiếp công sở",
|
||||
description:
|
||||
"Cải thiện kỹ năng giao tiếp tiếng Anh trong môi trường làm việc chuyên nghiệp.",
|
||||
category: "Ngoại ngữ",
|
||||
rating: 4.4,
|
||||
price: "600.000 VNĐ",
|
||||
icon: "fas fa-language",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Business%20English",
|
||||
slug: "tieng-anh-giao-tiep-cong-so",
|
||||
},
|
||||
{
|
||||
id: 8,
|
||||
title: "Quản trị dự án Agile/Scrum",
|
||||
description:
|
||||
"Phương pháp quản lý dự án linh hoạt Agile và khung làm việc Scrum.",
|
||||
category: "Kỹ năng mềm",
|
||||
rating: 4.7,
|
||||
price: "950.000 VNĐ",
|
||||
icon: "fas fa-tasks",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Agile%20Scrum",
|
||||
slug: "quan-tri-du-an-agile-scrum",
|
||||
},
|
||||
{
|
||||
id: 9,
|
||||
title: "Nhiếp ảnh cơ bản",
|
||||
description:
|
||||
"Làm chủ máy ảnh và nghệ thuật nhiếp ảnh. Bố cục, ánh sáng và chỉnh sửa ảnh.",
|
||||
category: "Nghệ thuật",
|
||||
rating: 4.9,
|
||||
price: "500.000 VNĐ",
|
||||
icon: "fas fa-camera",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Photography",
|
||||
slug: "nhiep-anh-co-ban",
|
||||
},
|
||||
{
|
||||
id: 10,
|
||||
title: "Blockchain 101",
|
||||
description:
|
||||
"Hiểu về công nghệ Blockchain, Bitcoin, Ethereum và Smart Contracts.",
|
||||
category: "Công nghệ",
|
||||
rating: 4.6,
|
||||
price: "1.300.000 VNĐ",
|
||||
icon: "fas fa-link",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Blockchain",
|
||||
slug: "blockchain-101",
|
||||
},
|
||||
{
|
||||
id: 11,
|
||||
title: "ReactJS Nâng cao",
|
||||
description:
|
||||
"Các kỹ thuật nâng cao trong React: Hooks, Context, Redux, Performance Optimization.",
|
||||
category: "Lập trình",
|
||||
rating: 4.9,
|
||||
price: "1.000.000 VNĐ",
|
||||
icon: "fas fa-code",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Advanced%20React",
|
||||
slug: "reactjs-nang-cao",
|
||||
},
|
||||
{
|
||||
id: 12,
|
||||
title: "Viết Content Marketing thu hút",
|
||||
description:
|
||||
"Kỹ thuật viết bài chuẩn SEO, thu hút người đọc và tăng tỷ lệ chuyển đổi.",
|
||||
category: "Marketing",
|
||||
rating: 4.5,
|
||||
price: "550.000 VNĐ",
|
||||
icon: "fas fa-pen-nib",
|
||||
bgImg: "https://placehold.co/600x400/EEE/31343C?font=playfair-display&text=Content%20Marketing",
|
||||
slug: "viet-content-marketing",
|
||||
}
|
||||
];
|
||||
|
||||
const courseContent = [
|
||||
{
|
||||
id: 1,
|
||||
title: "Giới thiệu khóa học",
|
||||
type: "video",
|
||||
duration: "5:00",
|
||||
completed: true,
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
title: "Cài đặt môi trường",
|
||||
type: "video",
|
||||
duration: "15:00",
|
||||
completed: false,
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
title: "Kiến thức cơ bản",
|
||||
type: "video",
|
||||
duration: "25:00",
|
||||
completed: false,
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
title: "Bài tập thực hành 1",
|
||||
type: "quiz",
|
||||
duration: "10:00",
|
||||
completed: false,
|
||||
},
|
||||
];
|
||||
|
||||
const routes = {
|
||||
// define as a bare function
|
||||
checkId: (id: string) => {
|
||||
const context = getContext();
|
||||
console.log(context.req.raw.headers);
|
||||
return id === "good";
|
||||
},
|
||||
|
||||
checkIdThrow: (id: string) => {
|
||||
tinyassert(id === "good", "Invalid ID");
|
||||
return null;
|
||||
},
|
||||
|
||||
getCounter: () => {
|
||||
const context = getContext();
|
||||
console.log(context.get("jwtPayload"));
|
||||
return counter;
|
||||
},
|
||||
|
||||
// define with zod validation + input type inference
|
||||
incrementCounter: validateFn(z.object({ delta: z.number().default(1) }))(
|
||||
(input) => {
|
||||
// expectTypeOf(input).toEqualTypeOf<{ delta: number }>();
|
||||
counter += input.delta;
|
||||
return counter;
|
||||
}
|
||||
),
|
||||
|
||||
// access context
|
||||
components: async () => { },
|
||||
getHomeCourses: async () => {
|
||||
return listCourses.slice(0, 3);
|
||||
},
|
||||
getCourses: validateFn(
|
||||
z.object({
|
||||
page: z.number().default(1),
|
||||
limit: z.number().default(6),
|
||||
search: z.string().optional(),
|
||||
category: z.string().optional(),
|
||||
})
|
||||
)(async ({ page, limit, search, category }) => {
|
||||
let filtered = listCourses;
|
||||
|
||||
if (search) {
|
||||
const lowerSearch = search.toLowerCase();
|
||||
filtered = filtered.filter(
|
||||
(c) =>
|
||||
c.title.toLowerCase().includes(lowerSearch) ||
|
||||
c.description.toLowerCase().includes(lowerSearch)
|
||||
);
|
||||
}
|
||||
|
||||
if (category && category !== "All") {
|
||||
filtered = filtered.filter((c) => c.category === category);
|
||||
}
|
||||
|
||||
const start = (page - 1) * limit;
|
||||
const end = start + limit;
|
||||
const paginated = filtered.slice(start, end);
|
||||
|
||||
return {
|
||||
data: paginated,
|
||||
total: filtered.length,
|
||||
page,
|
||||
totalPages: Math.ceil(filtered.length / limit),
|
||||
};
|
||||
}),
|
||||
getCourseBySlug: validateFn(z.object({ slug: z.string() }))(async ({ slug }) => {
|
||||
const course = listCourses.find((c) => c.slug === slug);
|
||||
if (!course) {
|
||||
throw new Error("Course not found");
|
||||
}
|
||||
return course;
|
||||
}),
|
||||
getCourseContent: validateFn(z.object({ slug: z.string() }))(async ({ slug }) => {
|
||||
// In a real app, we would fetch content specific to the course
|
||||
return courseContent;
|
||||
}),
|
||||
presignedPut: validateFn(z.object({ fileName: z.string(), contentType: z.string().refine((val) => imageContentTypes.includes(val), { message: "Invalid content type" }) }))(async ({ fileName, contentType }) => {
|
||||
return await presignedPut(fileName, contentType);
|
||||
}),
|
||||
chunkedUpload: validateFn(z.object({ fileName: z.string(), contentType: z.string().refine((val) => videoContentTypes.includes(val), { message: "Invalid content type" }), fileSize: z.number().min(1024 * 10).max(3 * 1024 * 1024 * 1024).default(1024 * 256) }))(async ({ fileName, contentType, fileSize }) => {
|
||||
const key = nanoid() + "_" + fileName;
|
||||
const { UploadId } = await chunkedUpload(key, contentType, fileSize);
|
||||
const chunkSize = 1024 * 1024 * 20; // 20MB
|
||||
const presignedUrls = await createPresignedUrls({
|
||||
key,
|
||||
uploadId: UploadId!,
|
||||
totalParts: Math.ceil(fileSize / chunkSize),
|
||||
});
|
||||
return { uploadId: UploadId!, presignedUrls, chunkSize, key, totalParts: presignedUrls.length };
|
||||
}),
|
||||
completeChunk: validateFn(z.object({ key: z.string(), uploadId: z.string(), parts: z.array(z.object({ PartNumber: z.number(), ETag: z.string() })) }))(async ({ key, uploadId, parts }) => {
|
||||
await completeChunk(key, uploadId, parts);
|
||||
return { success: true };
|
||||
}),
|
||||
abortChunk: validateFn(z.object({ key: z.string(), uploadId: z.string() }))(async ({ key, uploadId }) => {
|
||||
await abortChunk(key, uploadId);
|
||||
return { success: true };
|
||||
}),
|
||||
...authMethods
|
||||
};
|
||||
export type RpcRoutes = typeof routes;
|
||||
export const endpoint = "/rpc";
|
||||
export const pathsForGET: (keyof typeof routes)[] = ["getCounter"];
|
||||
|
||||
/**
 * Auth gate for RPC routes.
 *
 * Marks whitelisted route names as public (`isPublic` context flag) and lets
 * them through untouched. All other requests under `endpoint` must carry an
 * `Authorization: Bearer <token>` header or receive 401.
 *
 * SECURITY(review): the token is never actually verified — the Firebase
 * verification calls below are commented out, so the try block is empty and
 * ANY non-empty Bearer token is accepted. Re-enable verification (or return
 * 401 unconditionally) before relying on this middleware for access control.
 */
export const firebaseAuthMiddleware: MiddlewareHandler = async (c, next) => {
    const publicPaths: (keyof typeof routes)[] = ["getHomeCourses", "getCourses", "getCourseBySlug", "getCourseContent"];
    // A path is public when any of its segments matches a public route name.
    const isPublic = publicPaths.some((path) => c.req.path.split("/").includes(path));
    c.set("isPublic", isPublic);

    // Skip auth entirely for non-RPC paths and for public RPC routes.
    // (Precedence: `(not-RPC-path) || isPublic` — the && binds tighter.)
    if (c.req.path !== endpoint && !c.req.path.startsWith(endpoint + "/") || isPublic) {
        return await next();
    }

    const authHeader = c.req.header("Authorization");
    if (!authHeader || !authHeader.startsWith("Bearer ")) {
        // Option: return 401 or let it pass with no user?
        // Old logic seemed to require it for non-public paths.
        return c.json({ error: "Unauthorized" }, 401);
    }

    // NOTE(review): `token` is currently unused because verification below is
    // disabled; with it disabled this catch branch is unreachable.
    const token = authHeader.split("Bearer ")[1];
    try {
        // const decodedToken = await adminAuth.verifyIdToken(token);
        // c.set("user", decodedToken);
    } catch (error) {
        console.error("Firebase Auth Error:", error);
        return c.json({ error: "Unauthorized" }, 401);
    }

    return await next();
}
|
||||
|
||||
export const rpcServer = async (c: Context, next: Next) => {
|
||||
if (c.req.path !== endpoint && !c.req.path.startsWith(endpoint + "/")) {
|
||||
return await next();
|
||||
}
|
||||
const cert = c.req.header()
|
||||
console.log("RPC Request Path:", c.req.raw.cf);
|
||||
// if (!cert) return c.text('Forbidden', 403)
|
||||
const handler = exposeTinyRpc({
|
||||
routes,
|
||||
adapter: httpServerAdapter({ endpoint }),
|
||||
});
|
||||
const res = await handler({ request: c.req.raw });
|
||||
if (res) {
|
||||
return res;
|
||||
}
|
||||
return await next();
|
||||
};
|
||||
@@ -1,198 +0,0 @@
|
||||
import {
|
||||
S3Client,
|
||||
ListBucketsCommand,
|
||||
ListObjectsV2Command,
|
||||
GetObjectCommand,
|
||||
PutObjectCommand,
|
||||
DeleteObjectCommand,
|
||||
CreateMultipartUploadCommand,
|
||||
UploadPartCommand,
|
||||
AbortMultipartUploadCommand,
|
||||
CompleteMultipartUploadCommand,
|
||||
ListPartsCommand,
|
||||
} from "@aws-sdk/client-s3";
|
||||
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
|
||||
import { createPresignedPost } from "@aws-sdk/s3-presigned-post";
|
||||
import { randomBytes } from "crypto";
|
||||
const urlAlphabet = 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict';
|
||||
|
||||
export function nanoid(size = 21) {
|
||||
let id = '';
|
||||
const bytes = randomBytes(size); // Node.js specific method
|
||||
|
||||
for (let i = 0; i < size; i++) {
|
||||
id += urlAlphabet[bytes[i] & 63];
|
||||
}
|
||||
|
||||
return id;
|
||||
}
|
||||
// createPresignedPost
// SECURITY(review): live access keys are hard-coded and committed to source
// control (and a second credential pair sits in the commented block below).
// Rotate these credentials and load them from environment variables or a
// secret manager.
const S3 = new S3Client({
    region: "auto", // Required by SDK but not used by R2
    endpoint: `https://s3.cloudfly.vn`,
    credentials: {
        // accessKeyId: "Q3AM3UQ867SPQQA43P2F",
        // secretAccessKey: "Ik7nlCaUUCFOKDJAeSgFcbF5MEBGh9sVGBUrsUOp",
        accessKeyId: "BD707P5W8J5DHFPUKYZ6",
        secretAccessKey: "LTX7IizSDn28XGeQaHNID2fOtagfLc6L2henrP6P",
    },
    // Path-style addressing (bucket in the path, not the subdomain) —
    // required by many S3-compatible providers.
    forcePathStyle: true,
});
|
||||
// const S3 = new S3Client({
|
||||
// region: "auto", // Required by SDK but not used by R2
|
||||
// endpoint: `https://u.pipic.fun`,
|
||||
// credentials: {
|
||||
// // accessKeyId: "Q3AM3UQ867SPQQA43P2F",
|
||||
// // secretAccessKey: "Ik7nlCaUUCFOKDJAeSgFcbF5MEBGh9sVGBUrsUOp",
|
||||
// accessKeyId: "cdnadmin",
|
||||
// secretAccessKey: "D@tkhong9",
|
||||
// },
|
||||
// forcePathStyle: true,
|
||||
// });
|
||||
export const imageContentTypes = ["image/png", "image/jpg", "image/jpeg", "image/webp"];
|
||||
export const videoContentTypes = ["video/mp4", "video/webm", "video/ogg", "video/*"];
|
||||
const nanoId = () => {
|
||||
// return crypto.randomUUID().replace(/-/g, "").slice(0, 10);
|
||||
return ""
|
||||
}
|
||||
export async function presignedPut(fileName: string, contentType: string){
|
||||
if (!imageContentTypes.includes(contentType)) {
|
||||
throw new Error("Invalid content type");
|
||||
}
|
||||
const key = nanoId()+"_"+fileName;
|
||||
const url = await getSignedUrl(
|
||||
S3,
|
||||
new PutObjectCommand({
|
||||
Bucket: "tmp",
|
||||
Key: key,
|
||||
ContentType: contentType,
|
||||
CacheControl: "public, max-age=31536000, immutable",
|
||||
// ContentLength: 31457280, // Max 30MB
|
||||
// ACL: "public-read", // Uncomment if you want the object to be publicly readable
|
||||
}),
|
||||
{ expiresIn: 600 } // URL valid for 10 minutes
|
||||
);
|
||||
return { url, key };
|
||||
}
|
||||
export async function createPresignedUrls({
|
||||
key,
|
||||
uploadId,
|
||||
totalParts,
|
||||
expiresIn = 60 * 15, // 15 phút
|
||||
}: {
|
||||
key: string;
|
||||
uploadId: string;
|
||||
totalParts: number;
|
||||
expiresIn?: number;
|
||||
}) {
|
||||
const urls = [];
|
||||
|
||||
for (let partNumber = 1; partNumber <= totalParts; partNumber++) {
|
||||
const command = new UploadPartCommand({
|
||||
Bucket: "tmp",
|
||||
Key: key,
|
||||
UploadId: uploadId,
|
||||
PartNumber: partNumber,
|
||||
});
|
||||
|
||||
const url = await getSignedUrl(S3, command, {
|
||||
expiresIn,
|
||||
});
|
||||
|
||||
urls.push({
|
||||
partNumber,
|
||||
url,
|
||||
});
|
||||
}
|
||||
|
||||
return urls;
|
||||
}
|
||||
export async function chunkedUpload(Key: string, contentType: string, fileSize: number) {
|
||||
// lớn hơn 3gb thì cút
|
||||
if (fileSize > 3 * 1024 * 1024 * 1024) {
|
||||
throw new Error("File size exceeds 3GB");
|
||||
}
|
||||
// CreateMultipartUploadCommand
|
||||
const uploadParams = {
|
||||
Bucket: "tmp",
|
||||
Key,
|
||||
ContentType: contentType,
|
||||
CacheControl: "public, max-age=31536000, immutable",
|
||||
};
|
||||
let data = await S3.send(new CreateMultipartUploadCommand(uploadParams));
|
||||
return data;
|
||||
}
|
||||
export async function abortChunk(key: string, uploadId: string) {
|
||||
await S3.send(
|
||||
new AbortMultipartUploadCommand({
|
||||
Bucket: "tmp",
|
||||
Key: key,
|
||||
UploadId: uploadId,
|
||||
})
|
||||
);
|
||||
}
|
||||
export async function completeChunk(key: string, uploadId: string, parts: { ETag: string; PartNumber: number }[]) {
|
||||
const listed = await S3.send(
|
||||
new ListPartsCommand({
|
||||
Bucket: "tmp",
|
||||
Key: key,
|
||||
UploadId: uploadId,
|
||||
})
|
||||
);
|
||||
if (!listed.Parts || listed.Parts.length !== parts.length) {
|
||||
throw new Error("Not all parts have been uploaded");
|
||||
}
|
||||
await S3.send(
|
||||
new CompleteMultipartUploadCommand({
|
||||
Bucket: "tmp",
|
||||
Key: key,
|
||||
UploadId: uploadId,
|
||||
MultipartUpload: {
|
||||
Parts: parts.sort((a, b) => a.PartNumber - b.PartNumber),
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
||||
export async function deleteObject(bucketName: string, objectKey: string) {
|
||||
await S3.send(
|
||||
new DeleteObjectCommand({
|
||||
Bucket: bucketName,
|
||||
Key: objectKey,
|
||||
})
|
||||
);
|
||||
}
|
||||
export async function listBuckets() {
|
||||
const data = await S3.send(new ListBucketsCommand({}));
|
||||
return data.Buckets;
|
||||
}
|
||||
export async function listObjects(bucketName: string) {
|
||||
const data = await S3.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: bucketName,
|
||||
})
|
||||
);
|
||||
return data.Contents;
|
||||
}
|
||||
/**
 * Creates a presigned POST form for direct browser image uploads to "tmp".
 *
 * @param fileName    client-supplied name, appended to a random key prefix
 * @param contentType must be one of imageContentTypes; also enforced in the
 *                    POST policy via the starts-with condition below
 * @returns the presigned POST payload (URL + form fields)
 * @throws Error for non-image content types
 */
export async function generateUploadForm(fileName: string, contentType: string) {
    if (!imageContentTypes.includes(contentType)) {
        throw new Error("Invalid content type");
    }
    return await createPresignedPost(S3, {
        Bucket: "tmp",
        Key: nanoId()+"_"+fileName,
        Expires: 10 * 60, // URL valid for 10 minutes
        Conditions: [
            ["starts-with", "$Content-Type", contentType],
            ["content-length-range", 0, 31457280], // Max 30MB
        ],
    });
}
|
||||
// generateUploadUrl("tmp", "cat.png", "image/png").then(console.log);
|
||||
export async function createDownloadUrl(key: string): Promise<string> {
|
||||
const url = await getSignedUrl(
|
||||
S3,
|
||||
new GetObjectCommand({ Bucket: "tmp", Key: key }),
|
||||
{ expiresIn: 600 } // 600 giây = 10 phút
|
||||
);
|
||||
return url;
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ref, computed } from 'vue';
|
||||
import { computed, ref } from 'vue';
|
||||
|
||||
export interface QueueItem {
|
||||
id: string;
|
||||
@@ -12,10 +12,19 @@ export interface QueueItem {
|
||||
thumbnail?: string;
|
||||
file?: File; // Keep reference to file for local uploads
|
||||
url?: string; // Keep reference to url for remote uploads
|
||||
// Upload chunk tracking
|
||||
activeChunks?: number;
|
||||
uploadedUrls?: string[];
|
||||
cancelled?: boolean;
|
||||
}
|
||||
|
||||
const items = ref<QueueItem[]>([]);
|
||||
|
||||
// Chunk upload configuration
|
||||
const CHUNK_SIZE = 90 * 1024 * 1024; // 90MB per chunk
|
||||
const MAX_PARALLEL = 3;
|
||||
const MAX_RETRY = 3;
|
||||
|
||||
export function useUploadQueue() {
|
||||
|
||||
const addFiles = (files: FileList) => {
|
||||
@@ -23,13 +32,16 @@ export function useUploadQueue() {
|
||||
id: Math.random().toString(36).substring(2, 9),
|
||||
name: file.name,
|
||||
type: 'local',
|
||||
status: 'pending', // Start as pending
|
||||
status: 'pending',
|
||||
progress: 0,
|
||||
uploaded: '0 MB',
|
||||
total: formatSize(file.size),
|
||||
speed: '0 MB/s',
|
||||
file: file,
|
||||
thumbnail: undefined // We could generate a thumbnail here if needed
|
||||
thumbnail: undefined,
|
||||
activeChunks: 0,
|
||||
uploadedUrls: [],
|
||||
cancelled: false
|
||||
}));
|
||||
|
||||
items.value.push(...newItems);
|
||||
@@ -40,32 +52,44 @@ export function useUploadQueue() {
|
||||
id: Math.random().toString(36).substring(2, 9),
|
||||
name: url.split('/').pop() || 'Remote File',
|
||||
type: 'remote',
|
||||
status: 'fetching', // Remote URLs start fetching immediately or pending? User said "khi nao nhan upload". Let's use pending.
|
||||
status: 'pending',
|
||||
progress: 0,
|
||||
uploaded: '0 MB',
|
||||
total: 'Unknown',
|
||||
speed: '0 MB/s',
|
||||
url: url
|
||||
url: url,
|
||||
activeChunks: 0,
|
||||
uploadedUrls: [],
|
||||
cancelled: false
|
||||
}));
|
||||
|
||||
// Override status to pending for consistency with user request
|
||||
newItems.forEach(i => i.status = 'pending');
|
||||
|
||||
items.value.push(...newItems);
|
||||
};
|
||||
|
||||
    // Removes an item from the queue entirely.
    // The cancelled flag is set first so any in-flight chunk loop holding a
    // reference to this item stops scheduling work before the item is dropped.
    const removeItem = (id: string) => {
        const item = items.value.find(i => i.id === id);
        if (item) {
            item.cancelled = true;
        }
        const index = items.value.findIndex(item => item.id === id);
        if (index !== -1) {
            items.value.splice(index, 1);
        }
    };
|
||||
|
||||
    // Flags an item as cancelled without removing it from the queue; chunk
    // upload loops check `cancelled` and stop starting new parts.
    const cancelItem = (id: string) => {
        const item = items.value.find(i => i.id === id);
        if (item) {
            item.cancelled = true;
            // NOTE(review): reuses 'error' as the terminal status; a dedicated
            // 'cancelled' status would let the UI distinguish the two — confirm.
            item.status = 'error';
        }
    };
|
||||
|
||||
const startQueue = () => {
|
||||
items.value.forEach(item => {
|
||||
if (item.status === 'pending') {
|
||||
if (item.type === 'local') {
|
||||
startMockUpload(item.id);
|
||||
startChunkUpload(item.id);
|
||||
} else {
|
||||
startMockRemoteFetch(item.id);
|
||||
}
|
||||
@@ -73,42 +97,165 @@ export function useUploadQueue() {
|
||||
});
|
||||
};
|
||||
|
||||
// Mock Upload Logic
|
||||
const startMockUpload = (id: string) => {
|
||||
// Real Chunk Upload Logic
|
||||
const startChunkUpload = async (id: string) => {
|
||||
const item = items.value.find(i => i.id === id);
|
||||
if (!item) return;
|
||||
if (!item || !item.file) return;
|
||||
|
||||
item.status = 'uploading';
|
||||
let progress = 0;
|
||||
const totalSize = item.file ? item.file.size : 1024 * 1024 * 50; // Default 50MB if unknown
|
||||
item.activeChunks = 0;
|
||||
item.uploadedUrls = [];
|
||||
|
||||
// Random speed between 1MB/s and 5MB/s
|
||||
const speedBytesPerStep = (1024 * 1024) + Math.random() * (1024 * 1024 * 4);
|
||||
const file = item.file;
|
||||
const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
|
||||
const progressMap = new Map<number, number>(); // chunk index -> uploaded bytes
|
||||
const queue: number[] = Array.from({ length: totalChunks }, (_, i) => i);
|
||||
|
||||
const interval = setInterval(() => {
|
||||
if (progress >= 100) {
|
||||
clearInterval(interval);
|
||||
item.status = 'complete';
|
||||
item.progress = 100;
|
||||
item.uploaded = item.total;
|
||||
return;
|
||||
const updateProgress = () => {
|
||||
let totalUploaded = 0;
|
||||
progressMap.forEach(value => {
|
||||
totalUploaded += value;
|
||||
});
|
||||
const percent = Math.min((totalUploaded / file.size) * 100, 100);
|
||||
item.progress = parseFloat(percent.toFixed(1));
|
||||
item.uploaded = formatSize(totalUploaded);
|
||||
|
||||
// Calculate speed (simplified)
|
||||
const currentSpeed = item.activeChunks ? item.activeChunks * 2 * 1024 * 1024 : 0;
|
||||
item.speed = formatSize(currentSpeed) + '/s';
|
||||
};
|
||||
|
||||
const processQueue = async () => {
|
||||
if (item.cancelled) return;
|
||||
|
||||
const activePromises: Promise<void>[] = [];
|
||||
|
||||
while ((item.activeChunks || 0) < MAX_PARALLEL && queue.length > 0) {
|
||||
const index = queue.shift()!;
|
||||
item.activeChunks = (item.activeChunks || 0) + 1;
|
||||
|
||||
const promise = uploadChunk(index, file, progressMap, updateProgress, item)
|
||||
.then(() => {
|
||||
item.activeChunks = (item.activeChunks || 0) - 1;
|
||||
});
|
||||
activePromises.push(promise);
|
||||
}
|
||||
|
||||
// Increment progress randomly
|
||||
const increment = Math.random() * 5 + 1; // 1-6% increment
|
||||
progress = Math.min(progress + increment, 100);
|
||||
|
||||
item.progress = Math.floor(progress);
|
||||
|
||||
// Calculate uploaded size string
|
||||
const currentBytes = (progress / 100) * totalSize;
|
||||
item.uploaded = formatSize(currentBytes);
|
||||
|
||||
// Re-randomize speed for realism
|
||||
const currentSpeed = (1024 * 1024) + Math.random() * (1024 * 1024 * 2);
|
||||
item.speed = formatSize(currentSpeed) + '/s';
|
||||
if (activePromises.length > 0) {
|
||||
await Promise.all(activePromises);
|
||||
await processQueue();
|
||||
}
|
||||
};
|
||||
|
||||
}, 500);
|
||||
try {
|
||||
await processQueue();
|
||||
|
||||
if (!item.cancelled) {
|
||||
item.status = 'processing';
|
||||
await completeUpload(item);
|
||||
}
|
||||
} catch (error) {
|
||||
item.status = 'error';
|
||||
console.error('Upload failed:', error);
|
||||
}
|
||||
};
|
||||
|
||||
const uploadChunk = (
|
||||
index: number,
|
||||
file: File,
|
||||
progressMap: Map<number, number>,
|
||||
updateProgress: () => void,
|
||||
item: QueueItem
|
||||
): Promise<void> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let retry = 0;
|
||||
|
||||
const attempt = () => {
|
||||
if (item.cancelled) return resolve();
|
||||
|
||||
const start = index * CHUNK_SIZE;
|
||||
const end = Math.min(start + CHUNK_SIZE, file.size);
|
||||
const chunk = file.slice(start, end);
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('file', chunk, file.name);
|
||||
|
||||
const xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', 'https://tmpfiles.org/api/v1/upload');
|
||||
|
||||
xhr.upload.onprogress = (e) => {
|
||||
if (e.lengthComputable) {
|
||||
progressMap.set(index, e.loaded);
|
||||
updateProgress();
|
||||
}
|
||||
};
|
||||
|
||||
xhr.onload = function() {
|
||||
if (xhr.status === 200) {
|
||||
try {
|
||||
const res = JSON.parse(xhr.responseText);
|
||||
if (res.status === 'success') {
|
||||
progressMap.set(index, chunk.size);
|
||||
if (item.uploadedUrls) {
|
||||
item.uploadedUrls[index] = res.data.url;
|
||||
}
|
||||
updateProgress();
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
handleError();
|
||||
}
|
||||
}
|
||||
handleError();
|
||||
};
|
||||
|
||||
xhr.onerror = handleError;
|
||||
|
||||
function handleError() {
|
||||
retry++;
|
||||
if (retry <= MAX_RETRY) {
|
||||
setTimeout(attempt, 2000);
|
||||
} else {
|
||||
item.status = 'error';
|
||||
reject(new Error(`Failed to upload chunk ${index + 1}`));
|
||||
}
|
||||
};
|
||||
|
||||
xhr.send(formData);
|
||||
};
|
||||
|
||||
attempt();
|
||||
});
|
||||
};
|
||||
|
||||
const completeUpload = async (item: QueueItem) => {
|
||||
if (!item.file || !item.uploadedUrls) return;
|
||||
|
||||
try {
|
||||
const response = await fetch('/merge', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
filename: item.file.name,
|
||||
chunks: item.uploadedUrls
|
||||
})
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Merge failed');
|
||||
}
|
||||
|
||||
item.status = 'complete';
|
||||
item.progress = 100;
|
||||
item.uploaded = item.total;
|
||||
item.speed = '0 MB/s';
|
||||
} catch (error) {
|
||||
item.status = 'error';
|
||||
console.error('Merge failed:', error);
|
||||
}
|
||||
};
|
||||
|
||||
// Mock Remote Fetch Logic
|
||||
@@ -116,11 +263,9 @@ export function useUploadQueue() {
|
||||
const item = items.value.find(i => i.id === id);
|
||||
if (!item) return;
|
||||
|
||||
item.status = 'fetching'; // Update status to fetching
|
||||
item.status = 'fetching';
|
||||
|
||||
// Remote fetch takes some time then completes
|
||||
setTimeout(() => {
|
||||
// Switch to uploading/processing phase if we wanted, or just finish
|
||||
item.status = 'complete';
|
||||
item.progress = 100;
|
||||
}, 3000 + Math.random() * 3000);
|
||||
@@ -156,6 +301,7 @@ export function useUploadQueue() {
|
||||
addFiles,
|
||||
addRemoteUrls,
|
||||
removeItem,
|
||||
cancelItem,
|
||||
startQueue,
|
||||
totalSize,
|
||||
completeCount,
|
||||
|
||||
@@ -12,6 +12,8 @@ import { createApp } from './main';
|
||||
import { useAuthStore } from './stores/auth';
|
||||
// @ts-ignore
|
||||
import Base from '@primevue/core/base';
|
||||
import { baseAPIURL } from './api/httpClientAdapter.server';
|
||||
import { createManifest, getListFiles, saveManifest, validateChunkUrls } from './server/modules/merge';
|
||||
const app = new Hono()
|
||||
const defaultNames = ['primitive', 'semantic', 'global', 'base', 'ripple-directive']
|
||||
// app.use(renderer)
|
||||
@@ -31,7 +33,7 @@ app.use(cors(), async (c, next) => {
|
||||
return await next()
|
||||
}
|
||||
const url = new URL(c.req.url)
|
||||
url.host = 'api.pipic.fun'
|
||||
url.host = baseAPIURL.replace(/^https?:\/\//, '')
|
||||
url.protocol = 'https:'
|
||||
url.pathname = path.replace(/^\/r/, '') || '/'
|
||||
url.port = ''
|
||||
@@ -53,6 +55,53 @@ app.use(cors(), async (c, next) => {
|
||||
app.get("/.well-known/*", (c) => {
|
||||
return c.json({ ok: true });
|
||||
});
|
||||
app.post('/merge', async (c, next) => {
|
||||
const headers = new Headers(c.req.header());
|
||||
headers.delete("host");
|
||||
headers.delete("connection");
|
||||
return fetch(`${baseAPIURL}/me`, {
|
||||
method: 'GET',
|
||||
headers: headers,
|
||||
credentials: 'include'
|
||||
}).then(res => res.json()).then((r) => {
|
||||
if (r.data?.user) {
|
||||
return next();
|
||||
}
|
||||
else {
|
||||
throw new Error("Unauthorized");
|
||||
}}).catch(() => {
|
||||
return c.json({ error: "Unauthorized" }, 401);
|
||||
});
|
||||
}, async (c) => {
|
||||
try {
|
||||
const body = await c.req.json()
|
||||
const { filename, chunks } = body
|
||||
if (!filename || !Array.isArray(chunks) || chunks.length === 0) {
|
||||
return c.json({ error: 'invalid payload' }, 400)
|
||||
}
|
||||
const hostError = validateChunkUrls(chunks)
|
||||
if (hostError) return c.json({ error: hostError }, 400)
|
||||
|
||||
const manifest = createManifest(filename, chunks)
|
||||
await saveManifest(manifest)
|
||||
|
||||
return c.json({
|
||||
status: 'ok',
|
||||
id: manifest.id,
|
||||
filename: manifest.filename,
|
||||
total_parts: manifest.total_parts,
|
||||
})
|
||||
} catch (e: any) {
|
||||
return c.json({ error: e?.message ?? String(e) }, 500)
|
||||
}
|
||||
})
|
||||
app.get('/manifest/:id', async (c) => {
|
||||
const manifest = await getListFiles()
|
||||
if (!manifest) {
|
||||
return c.json({ error: "Manifest not found" }, 404)
|
||||
}
|
||||
return c.json(manifest)
|
||||
})
|
||||
app.get("*", async (c) => {
|
||||
const nonce = crypto.randomUUID();
|
||||
const url = new URL(c.req.url);
|
||||
|
||||
77
src/lib/swr/cache/adapters/localStorage.ts
vendored
77
src/lib/swr/cache/adapters/localStorage.ts
vendored
@@ -1,77 +0,0 @@
|
||||
import SWRVCache, { type ICacheItem } from '..'
|
||||
import type { IKey } from '../../types'
|
||||
|
||||
/**
|
||||
* LocalStorage cache adapter for swrv data cache.
|
||||
* https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage
|
||||
*/
|
||||
export default class LocalStorageCache extends SWRVCache<any> {
|
||||
private STORAGE_KEY
|
||||
|
||||
constructor (key = 'swrv', ttl = 0) {
|
||||
super(ttl)
|
||||
this.STORAGE_KEY = key
|
||||
}
|
||||
|
||||
private encode (storage: any) { return JSON.stringify(storage) }
|
||||
private decode (storage: any) { return JSON.parse(storage) }
|
||||
|
||||
get (k: IKey): ICacheItem<IKey> {
|
||||
const item = localStorage.getItem(this.STORAGE_KEY)
|
||||
if (item) {
|
||||
const _key = this.serializeKey(k)
|
||||
const itemParsed: ICacheItem<any> = JSON.parse(item)[_key]
|
||||
|
||||
if (itemParsed?.expiresAt === null) {
|
||||
itemParsed.expiresAt = Infinity // localStorage sets Infinity to 'null'
|
||||
}
|
||||
|
||||
return itemParsed
|
||||
}
|
||||
|
||||
return undefined as any
|
||||
}
|
||||
|
||||
set (k: string, v: any, ttl: number) {
|
||||
let payload = {}
|
||||
const _key = this.serializeKey(k)
|
||||
const timeToLive = ttl || this.ttl
|
||||
const storage = localStorage.getItem(this.STORAGE_KEY)
|
||||
const now = Date.now()
|
||||
const item = {
|
||||
data: v,
|
||||
createdAt: now,
|
||||
expiresAt: timeToLive ? now + timeToLive : Infinity
|
||||
}
|
||||
|
||||
if (storage) {
|
||||
payload = this.decode(storage)
|
||||
(payload as any)[_key] = item
|
||||
} else {
|
||||
payload = { [_key]: item }
|
||||
}
|
||||
|
||||
this.dispatchExpire(timeToLive, item, _key)
|
||||
localStorage.setItem(this.STORAGE_KEY, this.encode(payload))
|
||||
}
|
||||
|
||||
dispatchExpire (ttl: number, item: any, serializedKey: string) {
|
||||
ttl && setTimeout(() => {
|
||||
const current = Date.now()
|
||||
const hasExpired = current >= item.expiresAt
|
||||
if (hasExpired) this.delete(serializedKey)
|
||||
}, ttl)
|
||||
}
|
||||
|
||||
delete (serializedKey: string) {
|
||||
const storage = localStorage.getItem(this.STORAGE_KEY)
|
||||
let payload = {} as Record<string, any>
|
||||
|
||||
if (storage) {
|
||||
payload = this.decode(storage)
|
||||
delete payload[serializedKey]
|
||||
}
|
||||
|
||||
localStorage.setItem(this.STORAGE_KEY, this.encode(payload))
|
||||
}
|
||||
}
|
||||
72
src/lib/swr/cache/index.ts
vendored
72
src/lib/swr/cache/index.ts
vendored
@@ -1,72 +0,0 @@
|
||||
import type { IKey } from '../types'
|
||||
import hash from '../lib/hash'
|
||||
/** A single cached entry together with its bookkeeping timestamps. */
export interface ICacheItem<Data> {
  // the cached value itself
  data: Data,
  // epoch ms when the entry was written (compared against dedupingInterval by callers)
  createdAt: number,
  // epoch ms after which the entry is considered expired; Infinity = never
  expiresAt: number
}
|
||||
|
||||
function serializeKeyDefault (key: IKey): string {
|
||||
if (typeof key === 'function') {
|
||||
try {
|
||||
key = key()
|
||||
} catch (err) {
|
||||
// dependencies not ready
|
||||
key = ''
|
||||
}
|
||||
}
|
||||
|
||||
if (Array.isArray(key)) {
|
||||
key = hash(key)
|
||||
} else {
|
||||
// convert null to ''
|
||||
key = String(key || '')
|
||||
}
|
||||
|
||||
return key
|
||||
}
|
||||
|
||||
export default class SWRVCache<CacheData> {
|
||||
protected ttl: number
|
||||
private items?: Map<string, ICacheItem<CacheData>>
|
||||
|
||||
constructor (ttl = 0) {
|
||||
this.items = new Map()
|
||||
this.ttl = ttl
|
||||
}
|
||||
|
||||
serializeKey (key: IKey): string {
|
||||
return serializeKeyDefault(key)
|
||||
}
|
||||
|
||||
get (k: string): ICacheItem<CacheData> {
|
||||
const _key = this.serializeKey(k)
|
||||
return this.items!.get(_key)!
|
||||
}
|
||||
|
||||
set (k: string, v: any, ttl: number) {
|
||||
const _key = this.serializeKey(k)
|
||||
const timeToLive = ttl || this.ttl
|
||||
const now = Date.now()
|
||||
const item = {
|
||||
data: v,
|
||||
createdAt: now,
|
||||
expiresAt: timeToLive ? now + timeToLive : Infinity
|
||||
}
|
||||
|
||||
this.dispatchExpire(timeToLive, item, _key)
|
||||
this.items!.set(_key, item)
|
||||
}
|
||||
|
||||
dispatchExpire (ttl: number, item: any, serializedKey: string) {
|
||||
ttl && setTimeout(() => {
|
||||
const current = Date.now()
|
||||
const hasExpired = current >= item.expiresAt
|
||||
if (hasExpired) this.delete(serializedKey)
|
||||
}, ttl)
|
||||
}
|
||||
|
||||
delete (serializedKey: string) {
|
||||
this.items!.delete(serializedKey)
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
// Public entry point for the vendored swrv fork: re-exports the cache class,
// the config type, the mutate helper, and the composable itself.
import SWRVCache from './cache'
import useSWRV, { mutate } from './use-swrv'

export {
  type IConfig
} from './types'
export { mutate, SWRVCache }
export default useSWRV
|
||||
@@ -1,44 +0,0 @@
|
||||
// From https://github.com/vercel/swr/blob/master/src/libs/hash.ts
|
||||
// use WeakMap to store the object->key mapping
|
||||
// so the objects can be garbage collected.
|
||||
// WeakMap uses a hashtable under the hood, so the lookup
|
||||
// complexity is almost O(1).
|
||||
const table = new WeakMap()
|
||||
|
||||
// counter of the key
|
||||
let counter = 0
|
||||
|
||||
// hashes an array of objects and returns a string
|
||||
export default function hash (args: any[]): string {
|
||||
if (!args.length) return ''
|
||||
let key = 'arg'
|
||||
for (let i = 0; i < args.length; ++i) {
|
||||
let _hash
|
||||
if (
|
||||
args[i] === null ||
|
||||
(typeof args[i] !== 'object' && typeof args[i] !== 'function')
|
||||
) {
|
||||
// need to consider the case that args[i] is a string:
|
||||
// args[i] _hash
|
||||
// "undefined" -> '"undefined"'
|
||||
// undefined -> 'undefined'
|
||||
// 123 -> '123'
|
||||
// null -> 'null'
|
||||
// "null" -> '"null"'
|
||||
if (typeof args[i] === 'string') {
|
||||
_hash = '"' + args[i] + '"'
|
||||
} else {
|
||||
_hash = String(args[i])
|
||||
}
|
||||
} else {
|
||||
if (!table.has(args[i])) {
|
||||
_hash = counter
|
||||
table.set(args[i], counter++)
|
||||
} else {
|
||||
_hash = table.get(args[i])
|
||||
}
|
||||
}
|
||||
key += '@' + _hash
|
||||
}
|
||||
return key
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
function isOnline (): boolean {
|
||||
if (typeof navigator.onLine !== 'undefined') {
|
||||
return navigator.onLine
|
||||
}
|
||||
// always assume it's online
|
||||
return true
|
||||
}
|
||||
|
||||
function isDocumentVisible (): boolean {
|
||||
if (
|
||||
typeof document !== 'undefined' &&
|
||||
typeof document.visibilityState !== 'undefined'
|
||||
) {
|
||||
return document.visibilityState !== 'hidden'
|
||||
}
|
||||
// always assume it's visible
|
||||
return true
|
||||
}
|
||||
|
||||
const fetcher = (url: string | Request) => fetch(url).then(res => res.json())
|
||||
|
||||
export default {
|
||||
isOnline,
|
||||
isDocumentVisible,
|
||||
fetcher
|
||||
}
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
import type { Ref, WatchSource } from 'vue'
|
||||
import SWRVCache from './cache'
|
||||
import LocalStorageCache from './cache/adapters/localStorage'
|
||||
|
||||
// Signature every data fetcher must satisfy: arbitrary args, sync or async result.
export type fetcherFn<Data> = (...args: any) => Data | Promise<Data>

/** Per-hook (and app-level) configuration; every field is optional. */
export interface IConfig<
  Data = any,
  Fn extends fetcherFn<Data> = fetcherFn<Data>
> {
  refreshInterval?: number        // polling period in ms; 0 disables polling
  cache?: LocalStorageCache | SWRVCache<any>
  dedupingInterval?: number       // window in ms during which identical fetches are deduped
  ttl?: number                    // client cache lifetime in ms; 0 = never expires
  serverTTL?: number              // cache lifetime used during SSR instead of ttl
  revalidateOnFocus?: boolean     // refetch on window focus / visibilitychange
  revalidateDebounce?: number     // delay in ms before a stale-hit revalidation fires
  shouldRetryOnError?: boolean
  errorRetryInterval?: number     // base delay between error retries
  errorRetryCount?: number        // maximum number of error retries
  fetcher?: Fn,
  isOnline?: () => boolean
  isDocumentVisible?: () => boolean
}

/** Options accepted by revalidate/mutate calls. */
export interface revalidateOptions {
  shouldRetryOnError?: boolean,
  errorRetryCount?: number,
  forceRevalidate?: boolean,
}

/** Reactive bundle returned by useSWRV. */
export interface IResponse<Data = any, Error = any> {
  data: Ref<Data | undefined>
  error: Ref<Error | undefined>
  isValidating: Ref<boolean>
  isLoading: Ref<boolean>
  mutate: (data?: fetcherFn<Data>, opts?: revalidateOptions) => Promise<void>
}

// A key may be a plain string, an argument array, nullish (hook disabled), or
// a watchable source producing one of those.
export type keyType = string | any[] | null | undefined

export type IKey = keyType | WatchSource<keyType>
|
||||
@@ -1,470 +0,0 @@
|
||||
/** ____
|
||||
*--------------/ \.------------------/
|
||||
* / swrv \. / //
|
||||
* / / /\. / //
|
||||
* / _____/ / \. /
|
||||
* / / ____/ . \. /
|
||||
* / \ \_____ \. /
|
||||
* / . \_____ \ \ / //
|
||||
* \ _____/ / ./ / //
|
||||
* \ / _____/ ./ /
|
||||
* \ / / . ./ /
|
||||
* \ / / ./ /
|
||||
* . \/ ./ / //
|
||||
* \ ./ / //
|
||||
* \.. / /
|
||||
* . ||| /
|
||||
* ||| /
|
||||
* . ||| / //
|
||||
* ||| / //
|
||||
* ||| /
|
||||
*/
|
||||
import { tinyassert } from "@hiogawa/utils";
|
||||
import {
|
||||
getCurrentInstance,
|
||||
inject,
|
||||
isReadonly,
|
||||
isRef,
|
||||
// isRef,
|
||||
onMounted,
|
||||
onServerPrefetch,
|
||||
onUnmounted,
|
||||
reactive,
|
||||
ref,
|
||||
toRefs,
|
||||
useSSRContext,
|
||||
watch,
|
||||
type FunctionPlugin
|
||||
} from 'vue';
|
||||
import SWRVCache from './cache';
|
||||
import webPreset from './lib/web-preset';
|
||||
import type { IConfig, IKey, IResponse, fetcherFn, revalidateOptions } from './types';
|
||||
|
||||
// Per-call-site reactive state tracked for every useSWRV instance.
type StateRef<Data, Error> = {
  data: Data, error: Error, isValidating: boolean, isLoading: boolean, revalidate: Function, key: any
};

// Module-level singletons shared by every hook instance:
// - DATA_CACHE: resolved data/error per serialized key
// - REF_CACHE: live component StateRefs per key (updated by mutate)
// - PROMISES_CACHE: in-flight fetch promises, used to dedupe concurrent fetches
const DATA_CACHE = new SWRVCache<Omit<IResponse, 'mutate'>>()
const REF_CACHE = new SWRVCache<StateRef<any, any>[]>()
const PROMISES_CACHE = new SWRVCache<Omit<IResponse, 'mutate'>>()

// Baseline configuration; useSWRV merges injected app config and per-call
// config over these values.
const defaultConfig: IConfig = {
  cache: DATA_CACHE,
  refreshInterval: 0,   // polling disabled by default
  ttl: 0,               // cached entries never expire by default
  serverTTL: 1000,
  dedupingInterval: 2000,
  revalidateOnFocus: true,
  revalidateDebounce: 0,
  shouldRetryOnError: true,
  errorRetryInterval: 5000,
  errorRetryCount: 5,
  fetcher: webPreset.fetcher,
  isOnline: webPreset.isOnline,
  isDocumentVisible: webPreset.isDocumentVisible
}
|
||||
|
||||
/**
|
||||
* Cache the refs for later revalidation
|
||||
*/
|
||||
function setRefCache(key: string, theRef: StateRef<any, any>, ttl: number) {
|
||||
const refCacheItem = REF_CACHE.get(key)
|
||||
if (refCacheItem) {
|
||||
refCacheItem.data.push(theRef)
|
||||
} else {
|
||||
// #51 ensures ref cache does not evict too soon
|
||||
const gracePeriod = 5000
|
||||
REF_CACHE.set(key, [theRef], ttl > 0 ? ttl + gracePeriod : ttl)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Schedule a retry after a failed revalidation.
 * Skipped entirely when the document is hidden or when the retry budget
 * (config.errorRetryCount) is exhausted. The delay grows linearly: attempt N
 * waits N * errorRetryInterval ms before calling revalidate again.
 */
function onErrorRetry(revalidate: (any: any, opts: revalidateOptions) => void, errorRetryCount: number, config: IConfig): void {
  if (!(config as any).isDocumentVisible()) {
    return
  }

  if (config.errorRetryCount !== undefined && errorRetryCount > config.errorRetryCount) {
    return
  }

  // clamp the attempt number to the configured maximum
  const count = Math.min(errorRetryCount || 0, (config as any).errorRetryCount)
  const timeout = count * (config as any).errorRetryInterval
  setTimeout(() => {
    revalidate(null, { errorRetryCount: count + 1, shouldRetryOnError: true })
  }, timeout)
}
|
||||
|
||||
/**
 * Main mutation function for receiving data from promises to change state and
 * set data cache.
 *
 * Awaits `res` when it is a promise (capturing a rejection as `error`), stores
 * a successful result in `cache` under `key`, then pushes the new data/error
 * into every live component StateRef currently bound to `key`.
 */
const mutate = async <Data>(key: string, res: Promise<Data> | Data, cache = DATA_CACHE, ttl = defaultConfig.ttl) => {
  let data, error, isValidating

  if (isPromise(res)) {
    try {
      data = await res
    } catch (err) {
      error = err
    }
  } else {
    data = res
  }

  // eslint-disable-next-line prefer-const
  isValidating = false

  const newData = { data, error, isValidating }
  // Only successful results are cached; errors are propagated to the refs
  // below but never persisted.
  if (typeof data !== 'undefined') {
    try {
      cache.set(key, newData, Number(ttl))
    } catch (err) {
      console.error('swrv(mutate): failed to set cache', err)
    }
  }

  /**
   * Revalidate all swrv instances with new data
   */
  const stateRef = REF_CACHE.get(key)
  if (stateRef && stateRef.data.length) {
    // This filter fixes #24 race conditions to only update ref data of current
    // key, while data cache will continue to be updated if revalidation is
    // fired
    let refs = stateRef.data.filter(r => r.key === key)

    refs.forEach((r, idx) => {
      if (typeof newData.data !== 'undefined') {
        r.data = newData.data
      }
      r.error = newData.error
      r.isValidating = newData.isValidating
      r.isLoading = newData.isValidating

      const isLast = idx === refs.length - 1
      if (!isLast) {
        // Clean up refs that belonged to old keys
        delete refs[idx]
      }
    })

    refs = refs.filter(Boolean)
  }

  return newData
}
|
||||
|
||||
/* Stale-While-Revalidate hook to handle fetching, caching, validation, and more...
 * Overloads: useSWRV(key) uses the global fetcher; useSWRV(key, fn, config?)
 * takes an explicit fetcher and per-call config overrides. */
function useSWRV<Data = any, Error = any>(
  key: IKey
): IResponse<Data, Error>
function useSWRV<Data = any, Error = any>(
  key: IKey,
  fn: fetcherFn<Data> | undefined | null,
  config?: IConfig
): IResponse<Data, Error>
function useSWRV<Data = any, Error = any>(...args: any[]): IResponse<Data, Error> {
  // app-level config (provided by the vueSWR plugin) overrides the defaults
  const injectedConfig = inject<Partial<IConfig> | null>('swrv-config', null)
  tinyassert(injectedConfig, 'Injected swrv-config must be an object')
  let key: IKey
  let fn: fetcherFn<Data> | undefined | null
  let config: IConfig = { ...defaultConfig, ...injectedConfig }
  let unmounted = false
  let isHydrated = false

  // must be called inside setup(): the lifecycle hooks below depend on it
  const instance = getCurrentInstance() as any
  const vm = instance?.proxy || instance // https://github.com/vuejs/composition-api/pull/520
  if (!vm) {
    console.error('Could not get current instance, check to make sure that `useSwrv` is declared in the top level of the setup function.')
    throw new Error('Could not get current instance')
  }

  const IS_SERVER = typeof window === 'undefined' || false
  // #region ssr
  const isSsrHydration = Boolean(
    !IS_SERVER &&
    window !== undefined && (window as any).window.swrv)
  // #endregion
  if (args.length >= 1) {
    key = args[0]
  }
  if (args.length >= 2) {
    fn = args[1]
  }
  if (args.length > 2) {
    config = {
      ...config,
      ...args[2]
    }
  }

  const ttl = IS_SERVER ? config.serverTTL : config.ttl
  const keyRef = typeof key === 'function' ? (key as any) : ref(key)

  if (typeof fn === 'undefined') {
    // use the global fetcher
    fn = config.fetcher
  }

  let stateRef: StateRef<Data, Error> | null = null
  // #region ssr
  if (isSsrHydration) {
    // component was ssrHydrated, so make the ssr reactive as the initial data
    const swrvState = (window as any).window.swrv || []
    const swrvKey = nanoHex(vm.$.type.__name ?? vm.$.type.name)
    if (swrvKey !== undefined && swrvKey !== null) {
      const nodeState = swrvState[swrvKey] || []
      const instanceState = nodeState[nanoHex(isRef(keyRef) ? keyRef.value : keyRef())]

      if (instanceState) {
        stateRef = reactive(instanceState)
        isHydrated = true
      }
    }
  }
  // #endregion

  if (!stateRef) {
    stateRef = reactive({
      data: undefined,
      error: undefined,
      isValidating: true,
      isLoading: true,
      key: null
    }) as StateRef<Data, Error>
  }

  /**
   * Revalidate the cache, mutate data.
   * Serves any cached value immediately (stale), then re-fetches unless
   * deduped, the document is hidden, or revalidation is explicitly suppressed.
   */
  const revalidate = async (data?: fetcherFn<Data>, opts?: revalidateOptions) => {
    const isFirstFetch = stateRef.data === undefined
    const keyVal = keyRef.value
    if (!keyVal) { return }

    const cacheItem = config.cache!.get(keyVal)
    const newData = cacheItem && cacheItem.data

    stateRef.isValidating = true
    stateRef.isLoading = !newData
    if (newData) {
      stateRef.data = newData.data
      stateRef.error = newData.error
    }

    const fetcher = data || fn
    if (
      !fetcher ||
      (!IS_SERVER && !(config as any).isDocumentVisible() && !isFirstFetch) ||
      (opts?.forceRevalidate !== undefined && !opts?.forceRevalidate)
    ) {
      stateRef.isValidating = false
      stateRef.isLoading = false
      return
    }

    // Dedupe items that were created in the last interval #76
    if (cacheItem) {
      const shouldRevalidate = Boolean(
        ((Date.now() - cacheItem.createdAt) >= (config as any).dedupingInterval) || opts?.forceRevalidate
      )
      if (!shouldRevalidate) {
        stateRef.isValidating = false
        stateRef.isLoading = false
        return
      }
    }

    // Runs the fetch (reusing any in-flight promise for the same key) and
    // pushes the result through mutate; schedules an error retry if needed.
    const trigger = async () => {
      const promiseFromCache = PROMISES_CACHE.get(keyVal)
      if (!promiseFromCache) {
        const fetcherArgs = Array.isArray(keyVal) ? keyVal : [keyVal]
        const newPromise = fetcher(...fetcherArgs)
        PROMISES_CACHE.set(keyVal, newPromise, (config as any).dedupingInterval)
        await mutate(keyVal, newPromise, (config as any).cache, ttl)
      } else {
        await mutate(keyVal, promiseFromCache.data, (config as any).cache, ttl)
      }
      stateRef.isValidating = false
      stateRef.isLoading = false
      PROMISES_CACHE.delete(keyVal)
      if (stateRef.error !== undefined) {
        const shouldRetryOnError = !unmounted && config.shouldRetryOnError && (opts ? opts.shouldRetryOnError : true)
        if (shouldRetryOnError) {
          onErrorRetry(revalidate, opts ? Number(opts.errorRetryCount) : 1, config)
        }
      }
    }

    if (newData && config.revalidateDebounce) {
      setTimeout(async () => {
        if (!unmounted) {
          await trigger()
        }
      }, config.revalidateDebounce)
    } else {
      await trigger()
    }
  }

  const revalidateCall = async () => revalidate(null as any, { shouldRetryOnError: false })
  let timer: any = null
  /**
   * Setup polling
   */
  onMounted(() => {
    const tick = async () => {
      // component might un-mount during revalidate, so do not set a new timeout
      // if this is the case, but continue to revalidate since promises can't
      // be cancelled and new hook instances might rely on promise/data cache or
      // from pre-fetch
      if (!stateRef.error && (config as any).isOnline()) {
        // if API request errored, we stop polling in this round
        // and let the error retry function handle it
        await revalidate()
      } else {
        if (timer) {
          clearTimeout(timer)
        }
      }

      if (config.refreshInterval && !unmounted) {
        timer = setTimeout(tick, config.refreshInterval)
      }
    }

    if (config.refreshInterval) {
      timer = setTimeout(tick, config.refreshInterval)
    }
    if (config.revalidateOnFocus) {
      document.addEventListener('visibilitychange', revalidateCall, false)
      window.addEventListener('focus', revalidateCall, false)
    }
  })

  /**
   * Teardown: stop polling, detach focus listeners, and drop this component's
   * StateRef from the shared REF_CACHE bucket.
   */
  onUnmounted(() => {
    unmounted = true
    if (timer) {
      clearTimeout(timer)
    }
    if (config.revalidateOnFocus) {
      document.removeEventListener('visibilitychange', revalidateCall, false)
      window.removeEventListener('focus', revalidateCall, false)
    }
    const refCacheItem = REF_CACHE.get(keyRef.value)
    if (refCacheItem) {
      refCacheItem.data = refCacheItem.data.filter((ref) => ref !== stateRef)
    }
  })

  // #region ssr
  if (IS_SERVER) {
    const ssrContext = useSSRContext()
    // make sure srwv exists in ssrContext
    let swrvRes: Record<string, any> = {}
    if (ssrContext) {
      swrvRes = ssrContext.swrv = ssrContext.swrv || swrvRes
    }

    // state is keyed by hashed component name, then by hashed swrv key
    const ssrKey = nanoHex(vm.$.type.__name ?? vm.$.type.name)
    // if (!vm.$vnode || (vm.$node && !vm.$node.data)) {
    //   vm.$vnode = {
    //     data: { attrs: { 'data-swrv-key': ssrKey } }
    //   }
    // }

    // const attrs = (vm.$vnode.data.attrs = vm.$vnode.data.attrs || {})
    // attrs['data-swrv-key'] = ssrKey
    // // Nuxt compatibility
    // if (vm.$ssrContext && vm.$ssrContext.nuxt) {
    //   vm.$ssrContext.nuxt.swrv = swrvRes
    // }
    if (ssrContext) {
      ssrContext.swrv = swrvRes
    }
    onServerPrefetch(async () => {
      await revalidate()
      if (!swrvRes[ssrKey]) swrvRes[ssrKey] = {}

      swrvRes[ssrKey][nanoHex(keyRef.value)] = {
        data: stateRef.data,
        error: stateRef.error,
        isValidating: stateRef.isValidating
      }
    })
  }
  // #endregion

  /**
   * Revalidate when key dependencies change
   */
  try {
    watch(keyRef, (val) => {
      if (!isReadonly(keyRef)) {
        keyRef.value = val
      }
      stateRef.key = val
      stateRef.isValidating = Boolean(val)
      setRefCache(keyRef.value, stateRef, Number(ttl))

      // skip the immediate client fetch right after SSR hydration — the
      // hydrated state is already fresh
      if (!IS_SERVER && !isHydrated && keyRef.value) {
        revalidate()
      }
      isHydrated = false
    }, {
      immediate: true
    })
  } catch {
    // do nothing
  }

  const res: IResponse = {
    ...toRefs(stateRef),
    mutate: (data?: fetcherFn<Data>, opts?: revalidateOptions) => revalidate(data, {
      ...opts,
      forceRevalidate: true
    })
  }

  return res
}
|
||||
|
||||
function isPromise<T>(p: any): p is Promise<T> {
|
||||
return p !== null && typeof p === 'object' && typeof p.then === 'function'
|
||||
}
|
||||
|
||||
/**
|
||||
* string to hex 8 chars
|
||||
* @param name string
|
||||
* @returns string
|
||||
*/
|
||||
function nanoHex(name: string): string {
|
||||
try {
|
||||
let hash = 0
|
||||
for (let i = 0; i < name.length; i++) {
|
||||
const chr = name.charCodeAt(i)
|
||||
hash = ((hash << 5) - hash) + chr
|
||||
hash |= 0 // Convert to 32bit integer
|
||||
}
|
||||
let hex = (hash >>> 0).toString(16)
|
||||
while (hex.length < 8) {
|
||||
hex = '0' + hex
|
||||
}
|
||||
return hex
|
||||
} catch {
|
||||
console.error("err name: ", name)
|
||||
return '0000'
|
||||
}
|
||||
}
|
||||
/**
 * Vue plugin factory: installs useSWRV as `$swrv` on globalProperties and
 * provides the (partial) config under the 'swrv-config' injection key, which
 * useSWRV merges over its defaults.
 */
export const vueSWR = (swrvConfig: Partial<IConfig> = defaultConfig): FunctionPlugin => (app) => {
  app.config.globalProperties.$swrv = useSWRV
  // app.provide('swrv', useSWRV)
  app.provide('swrv-config', swrvConfig)
}
export { mutate };
export default useSWRV
|
||||
@@ -11,7 +11,7 @@ import { useUploadQueue } from '@/composables/useUploadQueue';
|
||||
|
||||
const mode = ref<'local' | 'remote'>('local');
|
||||
|
||||
const { addFiles, addRemoteUrls, items, removeItem, totalSize, completeCount, pendingCount, startQueue } = useUploadQueue();
|
||||
const { addFiles, addRemoteUrls, items, removeItem, cancelItem, totalSize, completeCount, pendingCount, startQueue } = useUploadQueue();
|
||||
|
||||
const handlePublish = () => {
|
||||
console.log('Publishing items...');
|
||||
@@ -50,7 +50,7 @@ const handleRemoteUrls = (urls: string[]) => {
|
||||
</div>
|
||||
</div>
|
||||
<UploadQueue :items="items" :total-size="totalSize" :complete-count="completeCount"
|
||||
:pending-count="pendingCount" @remove-item="removeItem" @publish="handlePublish"
|
||||
:pending-count="pendingCount" @remove-item="removeItem" @cancel-item="cancelItem" @publish="handlePublish"
|
||||
@start-queue="startQueue" />
|
||||
</div>
|
||||
</template>
|
||||
@@ -11,9 +11,10 @@ defineProps<{
|
||||
|
||||
const emit = defineEmits<{
|
||||
removeItem: [id: string];
|
||||
cancelItem: [id: string];
|
||||
publish: [];
|
||||
startQueue: [];
|
||||
}>();
|
||||
}>()
|
||||
</script>
|
||||
|
||||
<template>
|
||||
@@ -53,7 +54,7 @@ const emit = defineEmits<{
|
||||
</div>
|
||||
|
||||
<UploadQueueItem v-for="item in items" :key="item.id" :item="item"
|
||||
@remove="emit('removeItem', $event)" />
|
||||
@remove="emit('removeItem', $event)" @cancel="emit('cancelItem', $event)" />
|
||||
</div>
|
||||
|
||||
<div class="p-6 border-t border-border shrink-0">
|
||||
|
||||
@@ -10,12 +10,13 @@ const props = defineProps<{
|
||||
|
||||
const emit = defineEmits<{
|
||||
remove: [id: string];
|
||||
cancel: [id: string];
|
||||
}>();
|
||||
|
||||
const statusLabel = computed(() => {
|
||||
switch (props.item.status) {
|
||||
case 'pending': return 'Pending';
|
||||
case 'uploading': return 'Uploading...';
|
||||
case 'uploading': return props.item.activeChunks ? `Uploading (${props.item.activeChunks} threads)` : 'Uploading...';
|
||||
case 'processing': return 'Processing...';
|
||||
case 'complete': return 'Completed';
|
||||
case 'error': return 'Failed';
|
||||
@@ -32,6 +33,10 @@ const statusColor = computed(() => {
|
||||
default: return 'bg-accent';
|
||||
}
|
||||
});
|
||||
|
||||
const canCancel = computed(() => {
|
||||
return props.item.status === 'uploading' || props.item.status === 'pending';
|
||||
});
|
||||
</script>
|
||||
|
||||
<template>
|
||||
@@ -72,7 +77,16 @@ const statusColor = computed(() => {
|
||||
<span class="w-2 h-2 rounded-full animate-pulse" :class="statusColor"></span>
|
||||
{{ statusLabel }}
|
||||
</span>
|
||||
<span class="text-accent font-bold">{{ item.progress || 0 }}%</span>
|
||||
<div class="flex items-center gap-2">
|
||||
<button
|
||||
v-if="canCancel && !minimal"
|
||||
@click="emit('cancel', item.id)"
|
||||
class="text-[10px] px-2 py-0.5 bg-red-50 text-red-500 hover:bg-red-100 rounded transition"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
<span class="text-accent font-bold">{{ item.progress || 0 }}%</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="h-1.5 w-full bg-slate-100 rounded-full overflow-hidden relative">
|
||||
<div class="absolute inset-0 bg-accent/20 animate-pulse w-full"></div>
|
||||
|
||||
213
src/server/modules/merge.ts
Normal file
213
src/server/modules/merge.ts
Normal file
@@ -0,0 +1,213 @@
|
||||
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
import { AwsClient } from 'aws4fetch';

// One uploaded chunk of a split file.
export type Part = {
  index: number   // 0-based position of this chunk in the reassembled file
  host: string    // hostname the chunk was uploaded to (see detectHost)
  url: string     // direct-download URL for the chunk (see formatUrl)
}

// Manifest describing how to reassemble a chunked upload; persisted as JSON.
export type Manifest = {
  version: 1
  id: string          // UUID, also the S3 object key (`${id}.json`)
  filename: string    // sanitized original filename
  total_parts: number
  parts: Part[]
  createdAt: number   // epoch ms
  expiresAt: number   // epoch ms; getManifest treats past-expiry as missing
}

// ---------------------------------------------------------------------------
// S3 Config
// ---------------------------------------------------------------------------

const S3_ENDPOINT = "https://minio1.webtui.vn:9000"
const BUCKET_NAME = "bucket-lethdat"

// SECURITY NOTE(review): S3 credentials are hardcoded in source and will ship
// in the repository/bundle — move accessKeyId/secretAccessKey to environment
// configuration and rotate this secret.
const aws = new AwsClient({
  accessKeyId: "lethdat",
  secretAccessKey: "D@tkhong9",
  service: 's3',
  region: 'auto'
});

// ---------------------------------------------------------------------------
// S3 Operations
// ---------------------------------------------------------------------------

// Object key under which a manifest is stored in the bucket.
const OBJECT_KEY = (id: string) => `${id}.json`
|
||||
|
||||
/** Persist a manifest as JSON in MinIO. */
|
||||
export async function saveManifest(manifest: Manifest): Promise<void> {
|
||||
const url = `${S3_ENDPOINT}/${BUCKET_NAME}/${OBJECT_KEY(manifest.id)}`;
|
||||
|
||||
const response = await aws.fetch(url, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(manifest),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to save manifest: ${response.status} ${await response.text()}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Fetch a manifest from MinIO.
 * Returns null when the object is missing (404) or past its expiresAt (in
 * which case the stale object is also best-effort deleted).
 *
 * NOTE(review): the outer catch also swallows network failures, non-404 error
 * statuses, and JSON parse errors, so callers cannot distinguish "manifest
 * absent" from "storage backend down" — consider narrowing the try block.
 */
export async function getManifest(id: string): Promise<Manifest | null> {
  const url = `${S3_ENDPOINT}/${BUCKET_NAME}/${OBJECT_KEY(id)}`;

  try {
    const response = await aws.fetch(url, {
      method: 'GET',
    });

    if (response.status === 404) {
      return null
    }

    if (!response.ok) {
      throw new Error(`Failed to get manifest: ${response.status}`)
    }

    const text = await response.text()
    const manifest: Manifest = JSON.parse(text)

    // Expired manifests are treated as missing; deletion is fire-and-forget.
    if (manifest.expiresAt < Date.now()) {
      await deleteManifest(id).catch(() => {})
      return null
    }

    return manifest
  } catch (error) {
    return null
  }
}
|
||||
|
||||
/** Remove a manifest object from MinIO. */
|
||||
export async function deleteManifest(id: string): Promise<void> {
|
||||
const url = `${S3_ENDPOINT}/${BUCKET_NAME}/${OBJECT_KEY(id)}`;
|
||||
|
||||
const response = await aws.fetch(url, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
|
||||
if (!response.ok && response.status !== 404) {
|
||||
throw new Error(`Failed to delete manifest: ${response.status}`)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Allowed chunk source hosts
|
||||
const ALLOWED_HOSTS = [
|
||||
'tmpfiles.org',
|
||||
'gofile.io',
|
||||
'pixeldrain.com',
|
||||
'uploadfiles.io',
|
||||
'anonfiles.com',
|
||||
]
|
||||
|
||||
/** Returns an error message if any URL is disallowed, otherwise null. */
|
||||
export function validateChunkUrls(chunks: string[]): string | null {
|
||||
for (const u of chunks) {
|
||||
try {
|
||||
const { hostname } = new URL(u)
|
||||
if (!ALLOWED_HOSTS.some(h => hostname.includes(h))) {
|
||||
return `host not allowed: ${hostname}`
|
||||
}
|
||||
} catch {
|
||||
return `invalid url: ${u}`
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
export function sanitizeFilename(name: string): string {
|
||||
return name.replace(/[^a-zA-Z0-9._-]/g, '_')
|
||||
}
|
||||
|
||||
export function detectHost(url: string): string {
|
||||
try {
|
||||
return new URL(url).hostname.replace(/^www\./, '')
|
||||
} catch {
|
||||
return 'unknown'
|
||||
}
|
||||
}
|
||||
|
||||
function formatUrl(url: string): string {
|
||||
if (url.includes("tmpfiles.org/") && !url.includes("tmpfiles.org/dl/")) {
|
||||
return url.trim().replace("tmpfiles.org/", 'tmpfiles.org/dl/')
|
||||
}
|
||||
return url.trim()
|
||||
}
|
||||
|
||||
/** List all manifests in bucket (simple implementation). */
|
||||
export async function getListFiles(): Promise<string[]> {
|
||||
// For now return empty array - implement listing if needed
|
||||
// MinIO S3 ListObjectsV2 would require XML parsing
|
||||
return []
|
||||
}
|
||||
|
||||
/** Build a new Manifest. */
|
||||
export function createManifest(
|
||||
filename: string,
|
||||
chunks: string[],
|
||||
ttlMs = 60 * 60 * 1000,
|
||||
): Manifest {
|
||||
const id = crypto.randomUUID()
|
||||
const now = Date.now()
|
||||
return {
|
||||
version: 1,
|
||||
id,
|
||||
filename: sanitizeFilename(filename),
|
||||
total_parts: chunks.length,
|
||||
parts: chunks.map((url, index) => ({ index, host: detectHost(url), url: formatUrl(url) })),
|
||||
createdAt: now,
|
||||
expiresAt: now + ttlMs,
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Streaming
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Streams all parts in index order as one continuous ReadableStream. */
|
||||
export function streamManifest(manifest: Manifest): ReadableStream<Uint8Array> {
|
||||
const parts = [...manifest.parts].sort((a, b) => a.index - b.index)
|
||||
const RETRY = 3
|
||||
return new ReadableStream({
|
||||
async start(controller) {
|
||||
for (const part of parts) {
|
||||
let attempt = 0
|
||||
let ok = false
|
||||
while (attempt < RETRY && !ok) {
|
||||
attempt++
|
||||
try {
|
||||
const res = await fetch(formatUrl(part.url))
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`)
|
||||
const reader = res.body!.getReader()
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
controller.enqueue(value)
|
||||
}
|
||||
ok = true
|
||||
} catch (err: any) {
|
||||
if (attempt >= RETRY) {
|
||||
controller.error(new Error(`Part ${part.index} failed: ${err?.message ?? err}`))
|
||||
return
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000 * attempt))
|
||||
}
|
||||
}
|
||||
}
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
}
|