fix: 判断DOMException,新增分片数量的检查

This commit is contained in:
Pan Qiancheng 2025-12-26 14:42:20 +08:00
parent 347e1ab360
commit 147dc5eb12
8 changed files with 34 additions and 18 deletions

View File

@ -84,9 +84,11 @@ const triggers = [
assert(data.origin, `启用分片上传的extraFile必须指定origin`);
assert(data.chunkInfo?.chunkSize, `启用分片上传的extraFile必须指定chunkInfo.chunkSize`);
assert(data.size, `启用分片上传的extraFile必须指定size`);
assert(data.chunkInfo?.chunkSize > 100 * 1024, `chunkInfo.chunkSize必须大于100KB`);
assert(data.chunkInfo?.chunkSize < 1 * 1024 * 1024 * 1024, `chunkInfo.chunkSize必须小于1GB`);
assert(data.chunkInfo?.chunkSize >= 100 * 1024, `chunkInfo.chunkSize必须大于100KB`);
assert(data.chunkInfo?.chunkSize <= 1 * 1024 * 1024 * 1024, `chunkInfo.chunkSize必须小于1GB`);
assert(data.chunkInfo?.partCount && data.chunkInfo.partCount > 0, `chunkInfo.partCount必须大于0`);
assert(!data.chunkInfo?.merged, `chunkInfo.merged必须为false`);
assert(data.chunkInfo?.partCount <= 100, `分片数量不能超过100`);
// 计算partCount 是否正确
const expectedPartCount = Math.ceil(data.size / data.chunkInfo.chunkSize);
assert(data.chunkInfo.partCount === expectedPartCount, `chunkInfo.partCount计算错误预期值为${expectedPartCount},但实际值为${data.chunkInfo.partCount}`);

View File

@ -1,6 +1,7 @@
import { OpSchema } from '../../oak-app-domain/ExtraFile/Schema';
import { UploadFn } from "../../types/Cos";
import { EntityDict } from '../../oak-app-domain';
export declare function isAbortError(error: any): boolean;
/**
* S3存储服务AWSMinIOOSS等
* @param options

View File

@ -1,7 +1,9 @@
import { isOakException, OakUserException } from 'oak-domain/lib/types/Exception';
import { sliceFile, cleanTempFiles } from '../files/slice';
import assert from 'assert';
import { OakUploadException } from '../../types/Exception';
/**
 * Returns true when the given error represents a user-initiated abort:
 * a DOMException whose name is 'AbortError' (as thrown by AbortController / fetch).
 * The `typeof` guard prevents a ReferenceError in runtimes where the
 * DOMException global is not defined (e.g. older Node.js versions).
 */
export function isAbortError(error) {
    return typeof DOMException !== 'undefined'
        && error instanceof DOMException
        && error.name === 'AbortError';
}
/**
* 分片上传通用方法适用于所有类S3存储服务如AWSMinIO阿里云OSS等
* @param options 参数
@ -62,7 +64,7 @@ export async function chunkUpload(options) {
console.error(`分片 ${part.partNumber} 上传第 ${attempt + 1} 次失败:`, err);
lastError = err;
// 如果是OakUserException说明是用户主动中止上传不进行重试
if (isOakException(err, OakUserException)) {
if (isAbortError(err)) {
throw err;
}
if (attempt < retryTimes) {
@ -88,7 +90,7 @@ export async function chunkUpload(options) {
await uploadPart(task.part, task.chunk);
}
catch (err) {
if (isOakException(err, OakUserException)) {
if (isAbortError(err)) {
// 用户主动中止上传,抛到上层再处理
console.log(`分片 ${task.part.partNumber} 上传被用户中止`);
}

View File

@ -87,9 +87,11 @@ const triggers = [
(0, assert_1.default)(data.origin, `启用分片上传的extraFile必须指定origin`);
(0, assert_1.default)(data.chunkInfo?.chunkSize, `启用分片上传的extraFile必须指定chunkInfo.chunkSize`);
(0, assert_1.default)(data.size, `启用分片上传的extraFile必须指定size`);
(0, assert_1.default)(data.chunkInfo?.chunkSize > 100 * 1024, `chunkInfo.chunkSize必须大于100KB`);
(0, assert_1.default)(data.chunkInfo?.chunkSize < 1 * 1024 * 1024 * 1024, `chunkInfo.chunkSize必须小于1GB`);
(0, assert_1.default)(data.chunkInfo?.chunkSize >= 100 * 1024, `chunkInfo.chunkSize必须大于100KB`);
(0, assert_1.default)(data.chunkInfo?.chunkSize <= 1 * 1024 * 1024 * 1024, `chunkInfo.chunkSize必须小于1GB`);
(0, assert_1.default)(data.chunkInfo?.partCount && data.chunkInfo.partCount > 0, `chunkInfo.partCount必须大于0`);
(0, assert_1.default)(!data.chunkInfo?.merged, `chunkInfo.merged必须为false`);
(0, assert_1.default)(data.chunkInfo?.partCount <= 100, `分片数量不能超过100`);
// 计算partCount 是否正确
const expectedPartCount = Math.ceil(data.size / data.chunkInfo.chunkSize);
(0, assert_1.default)(data.chunkInfo.partCount === expectedPartCount, `chunkInfo.partCount计算错误预期值为${expectedPartCount},但实际值为${data.chunkInfo.partCount}`);

View File

@ -1,6 +1,7 @@
import { OpSchema } from '../../oak-app-domain/ExtraFile/Schema';
import { UploadFn } from "../../types/Cos";
import { EntityDict } from '../../oak-app-domain';
export declare function isAbortError(error: any): boolean;
/**
* S3存储服务AWSMinIOOSS等
* @param options

View File

@ -1,11 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isAbortError = isAbortError;
exports.chunkUpload = chunkUpload;
const tslib_1 = require("tslib");
const Exception_1 = require("oak-domain/lib/types/Exception");
const slice_1 = require("../files/slice");
const assert_1 = tslib_1.__importDefault(require("assert"));
const Exception_2 = require("../../types/Exception");
const Exception_1 = require("../../types/Exception");
/**
 * Returns true when the given error represents a user-initiated abort:
 * a DOMException whose name is 'AbortError' (as thrown by AbortController / fetch).
 * The `typeof` guard prevents a ReferenceError in runtimes where the
 * DOMException global is not defined (e.g. older Node.js versions).
 */
function isAbortError(error) {
    return typeof DOMException !== 'undefined'
        && error instanceof DOMException
        && error.name === 'AbortError';
}
/**
* 分片上传通用方法适用于所有类S3存储服务如AWSMinIO阿里云OSS等
* @param options 参数
@ -60,13 +63,13 @@ async function chunkUpload(options) {
(0, assert_1.default)(part.etag, `无法获取分片 ${part.partNumber} 的 ETag`);
return;
}
throw new Exception_2.OakUploadException(`分片 ${part.partNumber} 上传失败`);
throw new Exception_1.OakUploadException(`分片 ${part.partNumber} 上传失败`);
}
catch (err) {
console.error(`分片 ${part.partNumber} 上传第 ${attempt + 1} 次失败:`, err);
lastError = err;
// 如果是OakUserException说明是用户主动中止上传不进行重试
if ((0, Exception_1.isOakException)(err, Exception_1.OakUserException)) {
if (isAbortError(err)) {
throw err;
}
if (attempt < retryTimes) {
@ -75,7 +78,7 @@ async function chunkUpload(options) {
}
}
}
throw lastError || new Exception_2.OakUploadException(`分片 ${part.partNumber} 上传失败`);
throw lastError || new Exception_1.OakUploadException(`分片 ${part.partNumber} 上传失败`);
};
// 并行上传控制
const uploadTasks = pendingParts.map((part) => ({
@ -92,7 +95,7 @@ async function chunkUpload(options) {
await uploadPart(task.part, task.chunk);
}
catch (err) {
if ((0, Exception_1.isOakException)(err, Exception_1.OakUserException)) {
if (isAbortError(err)) {
// 用户主动中止上传,抛到上层再处理
console.log(`分片 ${task.part.partNumber} 上传被用户中止`);
}

View File

@ -106,9 +106,11 @@ const triggers: Trigger<EntityDict, 'extraFile', BRC<EntityDict>>[] = [
assert(data.origin, `启用分片上传的extraFile必须指定origin`);
assert(data.chunkInfo?.chunkSize, `启用分片上传的extraFile必须指定chunkInfo.chunkSize`);
assert(data.size, `启用分片上传的extraFile必须指定size`);
assert(data.chunkInfo?.chunkSize > 100 * 1024, `chunkInfo.chunkSize必须大于100KB`);
assert(data.chunkInfo?.chunkSize < 1 * 1024 * 1024 * 1024, `chunkInfo.chunkSize必须小于1GB`);
assert(data.chunkInfo?.chunkSize >= 100 * 1024, `chunkInfo.chunkSize必须大于100KB`);
assert(data.chunkInfo?.chunkSize <= 1 * 1024 * 1024 * 1024, `chunkInfo.chunkSize必须小于1GB`);
assert(data.chunkInfo?.partCount && data.chunkInfo.partCount > 0, `chunkInfo.partCount必须大于0`);
assert(!data.chunkInfo?.merged, `chunkInfo.merged必须为false`);
assert(data.chunkInfo?.partCount <= 100, `分片数量不能超过100`);
// 计算partCount 是否正确
const expectedPartCount = Math.ceil(data.size! / data.chunkInfo!.chunkSize);

View File

@ -1,4 +1,3 @@
import { isOakException, OakUserException } from 'oak-domain/lib/types/Exception';
import { sliceFile, cleanTempFiles } from '../files/slice';
import { OpSchema } from '../../oak-app-domain/ExtraFile/Schema';
import { UploadFn } from "../../types/Cos";
@ -6,6 +5,10 @@ import { EntityDict } from '../../oak-app-domain';
import assert from 'assert';
import { OakUploadException } from '../../types/Exception';
/**
 * Returns true when the given error represents a user-initiated abort:
 * a DOMException whose name is 'AbortError' (as thrown by AbortController / fetch).
 * The `typeof` guard prevents a ReferenceError in runtimes where the
 * DOMException global is not defined (e.g. older Node.js versions).
 */
export function isAbortError(error: any): boolean {
    return typeof DOMException !== 'undefined'
        && error instanceof DOMException
        && error.name === 'AbortError';
}
/**
* S3存储服务AWSMinIOOSS等
* @param options
@ -97,7 +100,7 @@ export async function chunkUpload(
console.error(`分片 ${part.partNumber} 上传第 ${attempt + 1} 次失败:`, err);
lastError = err;
// 如果是OakUserException说明是用户主动中止上传不进行重试
if (isOakException(err, OakUserException)) {
if (isAbortError(err)) {
throw err;
}
if (attempt < retryTimes) {
@ -125,7 +128,7 @@ export async function chunkUpload(
try {
await uploadPart(task.part, task.chunk);
} catch (err) {
if (isOakException(err, OakUserException)) {
if (isAbortError(err)) {
// 用户主动中止上传,抛到上层再处理
console.log(`分片 ${task.part.partNumber} 上传被用户中止`);
}