Merge branch '5.0' into dev
Commit deb3384362

@@ -1,5 +0,0 @@
import { CascadeRelationItem, RelationHierarchy, EntityDict } from "../types/Entity";
export type GenericRelation = 'owner';
export declare function convertHierarchyToAuth<ED extends EntityDict, T extends keyof ED>(entity: T, hierarchy: RelationHierarchy<NonNullable<ED[T]['Relation']>>): {
[K in NonNullable<ED[T]['Relation']>]?: CascadeRelationItem;
};

@@ -1,25 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.convertHierarchyToAuth = void 0;
function convertHierarchyToAuth(entity, hierarchy) {
const reverseHierarchy = {};
for (const r in hierarchy) {
for (const r2 of hierarchy[r]) {
if (reverseHierarchy[r2]) {
reverseHierarchy[r2]?.push(r);
}
else {
reverseHierarchy[r2] = [r];
}
}
}
const result = {};
for (const r in reverseHierarchy) {
result[r] = {
cascadePath: '',
relations: reverseHierarchy[r],
};
}
return result;
}
exports.convertHierarchyToAuth = convertHierarchyToAuth;
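// Illustrative example (not part of this diff): with a hypothetical hierarchy
// { owner: ['manager'], manager: ['member'] }, the removed convertHierarchyToAuth
// would return
// {
//     manager: { cascadePath: '', relations: ['owner'] },
//     member: { cascadePath: '', relations: ['manager'] },
// }
// i.e. each relation is mapped to the relations directly above it in the hierarchy.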
@@ -1,4 +1,4 @@
export declare const ActionDefDict: {
export declare const actionDefDict: {
modi: {
iState: import("../types").ActionDef<string, string>;
};

@@ -1,9 +1,9 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ActionDefDict = void 0;
exports.actionDefDict = void 0;
const Action_1 = require("./Modi/Action");
const Action_2 = require("./User/Action");
exports.ActionDefDict = {
modi: Action_1.ActionDefDict,
user: Action_2.ActionDefDict
exports.actionDefDict = {
modi: Action_1.actionDefDict,
user: Action_2.actionDefDict
};

@@ -5,6 +5,6 @@ export type IAction = 'apply' | 'abandon' | string;
export type ParticularAction = IAction;
export declare const actions: string[];
export type Action = GenericAction | ParticularAction | string;
export declare const ActionDefDict: {
export declare const actionDefDict: {
iState: ActionDef<string, string>;
};

@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ActionDefDict = exports.actions = void 0;
exports.actionDefDict = exports.actions = void 0;
const IActionDef = {
stm: {
apply: ['active', 'applied'],

@@ -9,6 +9,6 @@ const IActionDef = {
is: 'active',
};
exports.actions = ["count", "stat", "download", "select", "aggregate", "create", "remove", "update", "apply", "abandon"];
exports.ActionDefDict = {
exports.actionDefDict = {
iState: IActionDef
};

@@ -0,0 +1,3 @@
import { EntityDef } from "./Schema";
import { StyleDef } from "../../types/Style";
export declare const style: StyleDef<EntityDef["OpSchema"], EntityDef["Action"]>;

@@ -0,0 +1,16 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.style = void 0;
exports.style = {
icon: {
apply: '',
abandon: '',
},
color: {
iState: {
active: '#0000FF',
applied: '#008000',
abandoned: '#A9A9A9',
}
}
};

@@ -0,0 +1,3 @@
import { EntityDict } from "./EntityDict";
import { StyleDict } from "../types/Style";
export declare const styleDict: StyleDict<EntityDict>;

@@ -0,0 +1,9 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.styleDict = void 0;
const Style_1 = require("./Modi/Style");
const Style_2 = require("./User/Style");
exports.styleDict = {
modi: Style_1.style,
user: Style_2.style
};

@@ -6,6 +6,6 @@ export type ParticularAction = UserAction;
export declare const actions: string[];
export declare const UserActionDef: ActionDef<UserAction, UserState>;
export type Action = GenericAction | ParticularAction | RelationAction | string;
export declare const ActionDefDict: {
export declare const actionDefDict: {
userState: ActionDef<string, string>;
};

@@ -1,12 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ActionDefDict = exports.UserActionDef = exports.actions = void 0;
exports.actionDefDict = exports.UserActionDef = exports.actions = void 0;
exports.actions = ["count", "stat", "download", "select", "aggregate", "create", "remove", "update", "grant", "revoke", "mergeTo"];
exports.UserActionDef = {
stm: {
mergeTo: ['normal', 'merged'],
},
};
exports.ActionDefDict = {
exports.actionDefDict = {
userState: exports.UserActionDef
};

@@ -0,0 +1,3 @@
import { EntityDef } from "./Schema";
import { StyleDef } from "../../types/Style";
export declare const style: StyleDef<EntityDef["OpSchema"], EntityDef["Action"]>;

@@ -0,0 +1,14 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.style = void 0;
exports.style = {
icon: {
mergeTo: '',
},
color: {
userState: {
normal: '#112233',
merged: '#223344',
}
}
};

@@ -1,5 +0,0 @@
import { GenericAction } from "../../actions/action";
export type ParticularAction = 'confirm';
export declare const actions: string[];
export type Action = GenericAction | ParticularAction | string;
export declare const ActionDefDict: {};

@@ -1,5 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ActionDefDict = exports.actions = void 0;
exports.actions = ["count", "stat", "download", "select", "aggregate", "create", "remove", "update", "confirm"];
exports.ActionDefDict = {};

@@ -2,3 +2,4 @@ export * from './EntityDict';
export * from './Storage';
export * from './ActionDefDict';
export * from './Relation';
export * from './StyleDict';

@@ -5,3 +5,4 @@ tslib_1.__exportStar(require("./EntityDict"), exports);
tslib_1.__exportStar(require("./Storage"), exports);
tslib_1.__exportStar(require("./ActionDefDict"), exports);
tslib_1.__exportStar(require("./Relation"), exports);
tslib_1.__exportStar(require("./StyleDict"), exports);
@@ -0,0 +1,21 @@
type DepNode = {
name: string;
parent?: DepNode;
};
type DepGraph = {
nodeDict: Record<string, DepNode>;
roots: DepNode[];
ascOrder: string[];
};
/**
 * 构建项目依赖关系图
 * @param cwd
 * @returns
 */
export declare function analyzeDepedency(cwd: string): DepGraph;
/**
 * 本函数用于构建src/initialize.dev, src/initialize.prod, src/initializeFeatures, src/context/FrontendContext, src/contextBackendContext
 * 这些和dependency相关的项目文件
 */
export default function buildDependency(rebuild?: boolean): void;
export {};
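// Illustrative sketch (assumed shape, not part of this diff): analyzeDepedency
// reads each package's src/configuration/dependency.ts (or the compiled
// lib/configuration/dependency.js), which is expected to evaluate to an array
// of dependent package names, e.g.
//
//     export default ['oak-general-business'];
//
// buildDependency(true) then regenerates initialize.dev.ts, initialize.prod.ts,
// initializeFeatures.ts and the related context/typing files from the CLI's
// templateFiles, wiring in each dependency found in the graph.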
@@ -0,0 +1,829 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.analyzeDepedency = void 0;
const tslib_1 = require("tslib");
const assert_1 = tslib_1.__importDefault(require("assert"));
const path_1 = require("path");
const fs_1 = require("fs");
const ts = tslib_1.__importStar(require("typescript"));
const string_1 = require("../utils/string");
const env_1 = require("./env");
const fs_extra_1 = require("fs-extra");
const { factory } = ts;
/**
 * 构建项目依赖关系图
 * @param cwd
 * @returns
 */
function analyzeDepedency(cwd) {
const depGraph = {
nodeDict: {},
roots: [],
ascOrder: [],
};
function analyzeOne(dir, name, parent) {
const node = {
name,
parent,
};
if (name) {
depGraph.nodeDict[name] = node;
if (!parent) {
depGraph.roots.push(node);
}
}
let dependencies = [];
const depConfigTsFile = join(dir, 'src', 'configuration', 'dependency.ts');
if ((0, fs_1.existsSync)(depConfigTsFile)) {
// 这里依赖配置是ts文件,得翻译成js再读取
const result = ts.transpileModule((0, fs_1.readFileSync)(depConfigTsFile, 'utf-8'), { compilerOptions: { module: ts.ModuleKind.CommonJS } });
dependencies = eval(result.outputText);
}
else {
const depConfigJsFile = join(dir, 'lib', 'configuration', 'dependency.js');
if ((0, fs_1.existsSync)(depConfigJsFile)) {
dependencies = require(depConfigJsFile);
}
else {
// 没有依赖文件,直接返回
return;
}
}
dependencies.forEach((dep) => {
const n2 = depGraph.nodeDict[dep];
if (n2) {
(0, assert_1.default)(name);
}
else {
let dir2 = join(cwd, 'node_modules', dep);
if (!(0, fs_1.existsSync)(dir2)) {
dir2 = join(dir, 'node_modules', dep);
if (!(0, fs_1.existsSync)(dir2)) {
throw new Error(`找不到依赖包${dep}的安装位置,当前包是${dir}`);
}
}
analyzeOne(dir2, dep, name ? node : undefined);
}
});
}
analyzeOne(cwd, '');
// 输出一个从底向上的序列,因为当前的项目中最多只有一个依赖,所以暂时不写
const deps = Object.keys(depGraph.nodeDict);
(0, assert_1.default)(deps.length <= 1);
depGraph.ascOrder = deps;
return depGraph;
}
exports.analyzeDepedency = analyzeDepedency;
function join(...paths) {
const path = (0, path_1.join)(...paths);
return path.replaceAll('\\', '/');
}
function destructVariableDeclaration(vd) {
(0, assert_1.default)(ts.isIdentifier(vd.name));
(0, assert_1.default)(vd.name.text.startsWith('total'));
const on = (0, string_1.firstLetterLowerCase)(vd.name.text.slice(5));
const { initializer } = vd;
(0, assert_1.default)(ts.isCallExpression(initializer));
(0, assert_1.default)(ts.isIdentifier(initializer.expression) && initializer.expression.text === 'mergeConcatMany');
(0, assert_1.default)(initializer.arguments.length === 1);
const [arg] = initializer.arguments;
(0, assert_1.default)(ts.isAsExpression(arg));
const { expression } = arg;
(0, assert_1.default)(ts.isArrayLiteralExpression(expression));
return {
on,
expression,
};
}
|
||||
function outputPolyfillDts(dependencies, briefNames, sourceFile, printer, filename) {
|
||||
let statements2 = [];
|
||||
if (dependencies.length > 0) {
|
||||
const { statements } = sourceFile;
|
||||
(0, assert_1.default)(ts.isImportDeclaration(statements[5]) && ts.isModuleDeclaration(statements[6]));
|
||||
const importStatements = [];
|
||||
dependencies.forEach((dep, idx) => {
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, factory.createIdentifier("FeatureDict"), factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(briefNames[idx])}FeatureDict`))])), factory.createStringLiteral(`${dep}/es/features`), undefined), factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, factory.createIdentifier("AspectDict"), factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(briefNames[idx])}AspectDict`))])), factory.createStringLiteral(`${dep}/es/aspects`), undefined));
|
||||
});
|
||||
/**
|
||||
* declare global {
|
||||
const OakComponent: MakeOakComponent<
|
||||
EntityDict,
|
||||
BackendRuntimeContext,
|
||||
FrontendRuntimeContext,
|
||||
AspectDict & OgbAspectDict<EntityDict, BackendRuntimeContext>,
|
||||
FeatureDict & OgbFeatureDict<EntityDict>
|
||||
>;
|
||||
const features: FeatureDict & OgbFeatureDict<EntityDict>;
|
||||
}
|
||||
*/
|
||||
const stmt6 = statements[6];
|
||||
const { body } = stmt6;
|
||||
const [ocStmt, featuresStmt] = body.statements;
|
||||
(0, assert_1.default)(ts.isVariableStatement(ocStmt) && ts.isVariableStatement(featuresStmt));
|
||||
const [ocVd] = ocStmt.declarationList.declarations;
|
||||
const [featuresVd] = featuresStmt.declarationList.declarations;
|
||||
(0, assert_1.default)(ts.isVariableDeclaration(ocVd) && ts.isIdentifier(ocVd.name) && ocVd.name.text === 'OakComponent');
|
||||
(0, assert_1.default)(ts.isVariableDeclaration(featuresVd) && ts.isIdentifier(featuresVd.name) && featuresVd.name.text === 'features');
|
||||
const ocType = ocVd.type;
|
||||
(0, assert_1.default)(ts.isTypeReferenceNode(ocType) && ocType.typeArguments?.length === 5);
|
||||
const aspectTypeNode = ocType.typeArguments[3];
|
||||
const featureTypeNode = ocType.typeArguments[4];
|
||||
(0, assert_1.default)(ts.isTypeReferenceNode(aspectTypeNode) && ts.isTypeReferenceNode(featureTypeNode));
|
||||
Object.assign(ocType, {
|
||||
typeArguments: [
|
||||
...ocType.typeArguments.slice(0, 3),
|
||||
factory.createIntersectionTypeNode([
|
||||
aspectTypeNode,
|
||||
...briefNames.map((ele) => factory.createTypeReferenceNode(factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(ele)}AspectDict`), [
|
||||
factory.createTypeReferenceNode(factory.createIdentifier("EntityDict"), undefined)
|
||||
]))
|
||||
]),
|
||||
factory.createIntersectionTypeNode([
|
||||
featureTypeNode,
|
||||
...briefNames.map((ele) => factory.createTypeReferenceNode(factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(ele)}FeatureDict`), [
|
||||
factory.createTypeReferenceNode(factory.createIdentifier("EntityDict"), undefined),
|
||||
factory.createTypeReferenceNode(factory.createIdentifier("BackendRuntimeContext"), undefined)
|
||||
]))
|
||||
])
|
||||
]
|
||||
});
|
||||
const featureType = featuresVd.type;
|
||||
(0, assert_1.default)(ts.isTypeReferenceNode(featureType));
|
||||
Object.assign(featuresVd, {
|
||||
type: factory.createIntersectionTypeNode([
|
||||
featureType,
|
||||
...briefNames.map((ele) => factory.createTypeReferenceNode(factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(ele)}FeatureDict`), [
|
||||
factory.createTypeReferenceNode(factory.createIdentifier("EntityDict"), undefined),
|
||||
factory.createTypeReferenceNode(factory.createIdentifier("BackendRuntimeContext"), undefined)
|
||||
]))
|
||||
])
|
||||
});
|
||||
statements2 = [
|
||||
...statements.slice(0, 6),
|
||||
...importStatements,
|
||||
...statements.slice(6)
|
||||
];
|
||||
}
|
||||
else {
|
||||
statements2 = [...sourceFile.statements];
|
||||
}
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements2), sourceFile);
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
console.log(`构建${filename}文件成功`);
|
||||
}
|
||||
function outputDependentExceptions(dependencies, briefNames, sourceFile, printer, filename) {
|
||||
let statements2 = [];
|
||||
if (dependencies.length > 0) {
|
||||
const { statements } = sourceFile;
|
||||
(0, assert_1.default)(ts.isImportDeclaration(statements[2]) && ts.isFunctionDeclaration(statements[3]));
|
||||
const importStatements = [];
|
||||
dependencies.forEach((dep, idx) => {
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, factory.createIdentifier("makeException"), factory.createIdentifier(`make${(0, string_1.firstLetterUpperCase)(briefNames[idx])}Exception`))])), factory.createStringLiteral(dep), undefined));
|
||||
});
|
||||
const stmt3 = statements[3];
|
||||
const funcStmt0 = stmt3.body?.statements[0];
|
||||
(0, assert_1.default)(ts.isVariableStatement(funcStmt0));
|
||||
const vd = funcStmt0.declarationList.declarations[0];
|
||||
const { name, initializer } = vd;
|
||||
(0, assert_1.default)(ts.isIdentifier(name) && name.text === 'e');
|
||||
(0, assert_1.default)(ts.isCallExpression(initializer));
|
||||
const callExpressions = briefNames.map(ele => factory.createCallExpression(factory.createIdentifier(`make${(0, string_1.firstLetterUpperCase)(ele)}Exception`), [factory.createTypeReferenceNode(factory.createIdentifier("EntityDict"), undefined)], [factory.createIdentifier("data")]));
|
||||
const rightExpression = callExpressions.length === 1 ? callExpressions[0] :
|
||||
callExpressions.length === 2 ? factory.createBinaryExpression(callExpressions[0], factory.createToken(ts.SyntaxKind.BarBarToken), callExpressions[1]) : callExpressions.slice(2).reduce((prev, next) => factory.createBinaryExpression(prev, factory.createToken(ts.SyntaxKind.BarBarToken), next), factory.createBinaryExpression(callExpressions[0], factory.createToken(ts.SyntaxKind.BarBarToken), callExpressions[1]));
|
||||
Object.assign(vd, {
|
||||
initializer: factory.createBinaryExpression(initializer, factory.createToken(ts.SyntaxKind.BarBarToken), rightExpression)
|
||||
});
|
||||
statements2 = [
|
||||
...statements.slice(0, 3),
|
||||
...importStatements,
|
||||
...statements.slice(3)
|
||||
];
|
||||
}
|
||||
else {
|
||||
statements2 = [...sourceFile.statements];
|
||||
}
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements2), sourceFile);
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
console.log(`构建${filename}文件成功`);
|
||||
}
|
||||
function outputRuntimeCxt(dependencies, briefNames, sourceFile, printer, filename) {
|
||||
let statements2 = [];
|
||||
if (dependencies.length > 0) {
|
||||
const { statements } = sourceFile;
|
||||
const importStatements = [];
|
||||
const stmt9 = statements[9];
|
||||
const stmt10 = statements[10];
|
||||
(0, assert_1.default)(ts.isTypeAliasDeclaration(stmt9) && ts.isIdentifier(stmt9.name) && stmt9.name.text === 'AAD');
|
||||
(0, assert_1.default)(ts.isTypeAliasDeclaration(stmt10) && ts.isIdentifier(stmt10.name) && stmt10.name.text === 'AFD');
|
||||
(0, assert_1.default)(ts.isImportDeclaration(statements[5]) && ts.isTypeAliasDeclaration(statements[6]));
|
||||
const AADs = [];
|
||||
const AFDs = [];
|
||||
dependencies.forEach((dep, idx) => {
|
||||
const featureName = `${(0, string_1.firstLetterUpperCase)(briefNames[idx])}FeatureDict`;
|
||||
const aspectName = `${(0, string_1.firstLetterUpperCase)(briefNames[idx])}AspectDict`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("FeatureDict"), factory.createIdentifier(featureName)),
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("AspectDict"), factory.createIdentifier(aspectName))
|
||||
])), factory.createStringLiteral(dep), undefined));
|
||||
AFDs.push(featureName);
|
||||
AADs.push(aspectName);
|
||||
});
|
||||
{
|
||||
const { type } = stmt9;
|
||||
(0, assert_1.default)(ts.isTypeReferenceNode(type));
|
||||
Object.assign(stmt9, {
|
||||
type: factory.createIntersectionTypeNode([
|
||||
type,
|
||||
...AADs.map(ele => factory.createTypeReferenceNode(factory.createIdentifier(ele), [factory.createTypeReferenceNode('EntityDict')]))
|
||||
])
|
||||
});
|
||||
}
|
||||
{
|
||||
const { type } = stmt10;
|
||||
(0, assert_1.default)(ts.isIntersectionTypeNode(type));
|
||||
const { types } = type;
|
||||
Object.assign(type, {
|
||||
types: [
|
||||
...types,
|
||||
...AFDs.map(ele => factory.createTypeReferenceNode(factory.createIdentifier(ele), [factory.createTypeReferenceNode('EntityDict')]))
|
||||
]
|
||||
});
|
||||
}
|
||||
statements2 = [
|
||||
...statements.slice(0, 6),
|
||||
...importStatements,
|
||||
...statements.slice(6)
|
||||
];
|
||||
}
|
||||
else {
|
||||
statements2 = [
|
||||
...sourceFile.statements
|
||||
];
|
||||
}
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements2), sourceFile);
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
console.log(`构建${filename}文件成功`);
|
||||
}
|
||||
function outputDependentContext(depGraph, printer, filename) {
|
||||
// 目前只支持单向依赖,未来可以利用mixin来实现多类的继承
|
||||
(0, assert_1.default)(depGraph.roots.length <= 1);
|
||||
let root = depGraph.roots[0] ? depGraph.roots[0].name : 'oak-frontend-base';
|
||||
const statements = [
|
||||
factory.createExportDeclaration(undefined, false, factory.createNamedExports([
|
||||
factory.createExportSpecifier(false, undefined, factory.createIdentifier("BackendRuntimeContext")),
|
||||
factory.createExportSpecifier(false, undefined, factory.createIdentifier("FrontendRuntimeContext"))
|
||||
]), factory.createStringLiteral(root), undefined)
|
||||
];
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements), ts.createSourceFile("someFileName.ts", "", ts.ScriptTarget.Latest, false, ts.ScriptKind.TS));
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
console.log(`构建${filename}文件成功`);
|
||||
}
|
||||
/**
|
||||
* 生成initialize.prod.ts
|
||||
* @param cwd
|
||||
* @param dependencies
|
||||
* @param briefNames
|
||||
* @param sourceFile
|
||||
* @param printer
|
||||
*/
|
||||
function outputIntializeProd(cwd, dependencies, briefNames, sourceFile, printer, filename) {
|
||||
const { statements } = sourceFile;
|
||||
const objectDict = {};
|
||||
// 所有的import
|
||||
const importStatements = [];
|
||||
dependencies.forEach((dep, idx) => {
|
||||
const depDir = join(cwd, 'node_modules', dep);
|
||||
if (!(0, fs_1.existsSync)(depDir)) {
|
||||
throw new Error(`依赖模块${dep}未能找到相应的安装目录【${depDir}】`);
|
||||
}
|
||||
const esDir = join(depDir, 'es');
|
||||
const libDir = join(depDir, 'lib');
|
||||
const esDirExisted = (0, fs_1.existsSync)(esDir);
|
||||
const libDirExisted = (0, fs_1.existsSync)(libDir);
|
||||
if (!esDirExisted && !libDirExisted) {
|
||||
throw new Error(`依赖模块${dep}中没有es或者lib目录`);
|
||||
}
|
||||
const destDir = esDirExisted ? esDir : libDir;
|
||||
const destDirName = esDirExisted ? 'es' : 'lib';
|
||||
const objectDirs = ['checkers'];
|
||||
objectDirs.forEach((o) => {
|
||||
if ((0, fs_1.existsSync)(join(destDir, o))) {
|
||||
const variableName = `${briefNames[idx]}${(0, string_1.firstLetterUpperCase)(o)}`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(variableName), undefined), factory.createStringLiteral(join(dep, destDirName, o)), undefined));
|
||||
if (objectDict[o]) {
|
||||
objectDict[o].push(variableName);
|
||||
}
|
||||
else {
|
||||
objectDict[o] = [variableName];
|
||||
}
|
||||
}
|
||||
});
|
||||
// common
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'configuration'))) {
|
||||
const variableName = `${briefNames[idx]}Common`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(variableName), undefined), factory.createStringLiteral(join(dep, destDirName, 'configuration')), undefined));
|
||||
if (objectDict.common) {
|
||||
objectDict.common.push(variableName);
|
||||
}
|
||||
else {
|
||||
objectDict.common = [variableName];
|
||||
}
|
||||
}
|
||||
// render
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'configuration', 'render.js'))) {
|
||||
const variableName = `${briefNames[idx]}Render`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(variableName), undefined), factory.createStringLiteral(join(dep, destDirName, 'configuration', 'render')), undefined));
|
||||
if (objectDict.render) {
|
||||
objectDict.render.push(variableName);
|
||||
}
|
||||
else {
|
||||
objectDict.render = [variableName];
|
||||
}
|
||||
}
|
||||
// features
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'features'))) {
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("create"), factory.createIdentifier(`create${(0, string_1.firstLetterUpperCase)(briefNames[idx])}Features`)),
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("FeatureDict"), factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(briefNames[idx])}FeatureDict`))
|
||||
])), factory.createStringLiteral(join(dep, destDirName, 'features')), undefined));
|
||||
if (objectDict.features) {
|
||||
objectDict.features.push(briefNames[idx]);
|
||||
}
|
||||
else {
|
||||
objectDict.features = [briefNames[idx]];
|
||||
}
|
||||
}
|
||||
});
|
||||
const funcStmt = statements.find((stmt) => ts.isFunctionDeclaration(stmt) && stmt.modifiers?.find(modifier => modifier.kind === ts.SyntaxKind.ExportKeyword) && stmt.modifiers.find(modifier => modifier.kind === ts.SyntaxKind.DefaultKeyword));
|
||||
(0, assert_1.default)(funcStmt);
|
||||
const idx = statements.indexOf(funcStmt);
|
||||
const statements2 = [
|
||||
...statements.slice(0, idx),
|
||||
...importStatements,
|
||||
...statements.slice(idx)
|
||||
];
|
||||
const stmt0 = funcStmt.body?.statements[0];
|
||||
(0, assert_1.default)(ts.isVariableStatement(stmt0));
|
||||
let vdl = stmt0.declarationList;
|
||||
vdl.declarations.forEach((declaration) => {
|
||||
const { on, expression } = destructVariableDeclaration(declaration);
|
||||
if (objectDict[on]) {
|
||||
const { elements } = expression;
|
||||
Object.assign(expression, {
|
||||
elements: elements.concat(...objectDict[on].map(ele => factory.createIdentifier(ele)))
|
||||
});
|
||||
}
|
||||
});
|
||||
if (objectDict.features) {
|
||||
const stmt1 = funcStmt.body?.statements[1];
|
||||
(0, assert_1.default)(ts.isVariableStatement(stmt1));
|
||||
const tfDec = stmt1.declarationList.declarations[0];
|
||||
const { name, initializer } = tfDec;
|
||||
(0, assert_1.default)(ts.isIdentifier(name) && name.text === 'totalFeatures');
|
||||
(0, assert_1.default)(ts.isAsExpression(initializer));
|
||||
const { type } = initializer;
|
||||
(0, assert_1.default)(ts.isIntersectionTypeNode(type));
|
||||
Object.assign(type, {
|
||||
types: type.types.concat(objectDict.features.map(ele => factory.createTypeReferenceNode(`${(0, string_1.firstLetterUpperCase)(ele)}FeatureDict`, [
|
||||
factory.createTypeReferenceNode('EntityDict')
|
||||
])))
|
||||
});
|
||||
Object.assign(funcStmt.body, {
|
||||
statements: [
|
||||
...funcStmt.body.statements.slice(0, 4),
|
||||
...objectDict.features.map((ele) => [
|
||||
factory.createVariableStatement(undefined, factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier(`${ele}Features`), undefined, undefined, factory.createCallExpression(factory.createIdentifier(`create${(0, string_1.firstLetterUpperCase)(ele)}Features`), undefined, [factory.createIdentifier("totalFeatures")]))], ts.NodeFlags.Const)),
|
||||
factory.createExpressionStatement(factory.createCallExpression(factory.createPropertyAccessExpression(factory.createIdentifier("Object"), factory.createIdentifier("assign")), undefined, [
|
||||
factory.createIdentifier("totalFeatures"),
|
||||
factory.createIdentifier(`${ele}Features`)
|
||||
]))
|
||||
]).flat(),
|
||||
...funcStmt.body.statements.slice(4),
|
||||
]
|
||||
});
|
||||
}
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements2), sourceFile);
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
console.log(`构建${filename}文件成功`);
|
||||
}
|
||||
/**
|
||||
* 生成initialize.dev.ts
|
||||
* @param cwd
|
||||
* @param dependencies
|
||||
* @param briefNames
|
||||
* @param sourceFile
|
||||
* @param printer
|
||||
*/
|
||||
function outputIntializeDev(cwd, dependencies, briefNames, sourceFile, printer, filename) {
|
||||
const { statements } = sourceFile;
|
||||
const objectDict = {};
|
||||
// 所有的import
|
||||
const importStatements = [];
|
||||
dependencies.forEach((dep, idx) => {
|
||||
const depDir = join(cwd, 'node_modules', dep);
|
||||
if (!(0, fs_1.existsSync)(depDir)) {
|
||||
throw new Error(`依赖模块${dep}未能找到相应的安装目录【${depDir}】`);
|
||||
}
|
||||
const esDir = join(depDir, 'es');
|
||||
const libDir = join(depDir, 'lib');
|
||||
const esDirExisted = (0, fs_1.existsSync)(esDir);
|
||||
const libDirExisted = (0, fs_1.existsSync)(libDir);
|
||||
if (!esDirExisted && !libDirExisted) {
|
||||
throw new Error(`依赖模块${dep}中没有es或者lib目录`);
|
||||
}
|
||||
const destDir = esDirExisted ? esDir : libDir;
|
||||
const destDirName = esDirExisted ? 'es' : 'lib';
|
||||
const objectDirs = ['triggers', 'checkers', 'watchers', 'timers', 'data', 'aspects'];
|
||||
objectDirs.forEach((o) => {
|
||||
if ((0, fs_1.existsSync)(join(destDir, o))) {
|
||||
const variableName = `${briefNames[idx]}${(0, string_1.firstLetterUpperCase)(o)}`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(variableName), undefined), factory.createStringLiteral(join(dep, destDirName, o)), undefined));
|
||||
if (objectDict[o]) {
|
||||
objectDict[o].push(variableName);
|
||||
}
|
||||
else {
|
||||
objectDict[o] = [variableName];
|
||||
}
|
||||
}
|
||||
});
|
||||
// startRoutine
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'routines', 'start'))) {
|
||||
const variableName = `${briefNames[idx]}StartRoutines`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(variableName), undefined), factory.createStringLiteral(join(dep, destDirName, 'routines/start')), undefined));
|
||||
if (objectDict.startRoutines) {
|
||||
objectDict.startRoutines.push(variableName);
|
||||
}
|
||||
else {
|
||||
objectDict.startRoutines = [variableName];
|
||||
}
|
||||
}
|
||||
// common
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'configuration'))) {
|
||||
const variableName = `${briefNames[idx]}Common`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(variableName), undefined), factory.createStringLiteral(join(dep, destDirName, 'configuration')), undefined));
|
||||
if (objectDict.common) {
|
||||
objectDict.common.push(variableName);
|
||||
}
|
||||
else {
|
||||
objectDict.common = [variableName];
|
||||
}
|
||||
}
|
||||
// render
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'configuration', 'render.js'))) {
|
||||
const variableName = `${briefNames[idx]}Render`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(variableName), undefined), factory.createStringLiteral(join(dep, destDirName, 'configuration', 'render')), undefined));
|
||||
if (objectDict.render) {
|
||||
objectDict.render.push(variableName);
|
||||
}
|
||||
else {
|
||||
objectDict.render = [variableName];
|
||||
}
|
||||
}
|
||||
// features
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'features'))) {
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("create"), factory.createIdentifier(`create${(0, string_1.firstLetterUpperCase)(briefNames[idx])}Features`)),
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("FeatureDict"), factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(briefNames[idx])}FeatureDict`))
|
||||
])), factory.createStringLiteral(join(dep, destDirName, 'features')), undefined));
|
||||
if (objectDict.features) {
|
||||
objectDict.features.push(briefNames[idx]);
|
||||
}
|
||||
else {
|
||||
objectDict.features = [briefNames[idx]];
|
||||
}
|
||||
}
|
||||
// ports
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'ports'))) {
|
||||
const importVariableName = `${briefNames[idx]}Importations`;
|
||||
const exportVariableName = `${briefNames[idx]}Exportations`;
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("importations"), factory.createIdentifier(importVariableName)),
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("exportations"), factory.createIdentifier(exportVariableName))
|
||||
])), factory.createStringLiteral(join(dep, destDirName, 'ports')), undefined));
|
||||
if (objectDict.importations) {
|
||||
objectDict.importations.push(importVariableName);
|
||||
}
|
||||
else {
|
||||
objectDict.importations = [importVariableName];
|
||||
}
|
||||
if (objectDict.exportations) {
|
||||
objectDict.exportations.push(exportVariableName);
|
||||
}
|
||||
else {
|
||||
objectDict.exportations = [exportVariableName];
|
||||
}
|
||||
}
|
||||
});
|
||||
const funcStmt = statements.find((stmt) => ts.isFunctionDeclaration(stmt) && stmt.modifiers?.find(modifier => modifier.kind === ts.SyntaxKind.ExportKeyword) && stmt.modifiers.find(modifier => modifier.kind === ts.SyntaxKind.DefaultKeyword));
|
||||
(0, assert_1.default)(funcStmt);
|
||||
const idx = statements.indexOf(funcStmt);
|
||||
const statements2 = [
|
||||
...statements.slice(0, idx),
|
||||
...importStatements,
|
||||
...statements.slice(idx)
|
||||
];
|
||||
const stmt0 = funcStmt.body?.statements[0];
|
||||
(0, assert_1.default)(ts.isVariableStatement(stmt0));
|
||||
let vdl = stmt0.declarationList;
|
||||
vdl.declarations.forEach((declaration) => {
|
||||
const { on, expression } = destructVariableDeclaration(declaration);
|
||||
if (objectDict[on]) {
|
||||
const { elements } = expression;
|
||||
Object.assign(expression, {
|
||||
elements: elements.concat(...objectDict[on].map(ele => factory.createIdentifier(ele)))
|
||||
});
|
||||
}
|
||||
});
|
||||
if (objectDict.features) {
|
||||
const stmt2 = funcStmt.body?.statements[2];
|
||||
(0, assert_1.default)(ts.isVariableStatement(stmt2));
|
||||
const tfDec = stmt2.declarationList.declarations[0];
|
||||
const { name, initializer } = tfDec;
|
||||
(0, assert_1.default)(ts.isIdentifier(name) && name.text === 'totalFeatures');
|
||||
(0, assert_1.default)(ts.isAsExpression(initializer));
|
||||
const { type } = initializer;
|
||||
(0, assert_1.default)(ts.isIntersectionTypeNode(type));
|
||||
Object.assign(type, {
|
||||
types: type.types.concat(objectDict.features.map(ele => factory.createTypeReferenceNode(`${(0, string_1.firstLetterUpperCase)(ele)}FeatureDict`, [
|
||||
factory.createTypeReferenceNode('EntityDict')
|
||||
])))
|
||||
});
|
||||
Object.assign(funcStmt.body, {
|
||||
statements: [
|
||||
...funcStmt.body.statements.slice(0, 5),
|
||||
...objectDict.features.map((ele) => [
|
||||
factory.createVariableStatement(undefined, factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier(`${ele}Features`), undefined, undefined, factory.createCallExpression(factory.createIdentifier(`create${(0, string_1.firstLetterUpperCase)(ele)}Features`), undefined, [factory.createIdentifier("totalFeatures")]))], ts.NodeFlags.Const)),
|
||||
factory.createExpressionStatement(factory.createCallExpression(factory.createPropertyAccessExpression(factory.createIdentifier("Object"), factory.createIdentifier("assign")), undefined, [
|
||||
factory.createIdentifier("totalFeatures"),
|
||||
factory.createIdentifier(`${ele}Features`)
|
||||
]))
|
||||
]).flat(),
|
||||
...funcStmt.body.statements.slice(5),
|
||||
]
|
||||
});
|
||||
}
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements2), sourceFile);
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
console.log(`构建${filename}文件成功`);
|
||||
}
|
||||
/**
|
||||
* 根据依赖关系,输出features/index.ts
|
||||
* @param cwd
|
||||
* @param dependencies
|
||||
* @param briefNames
|
||||
* @param printer
|
||||
* @param filename
|
||||
*/
|
||||
function outputFeatureIndex(cwd, dependencies, briefNames, printer, filename) {
|
||||
// todo
|
||||
}
|
||||
function outputIntializeFeatures(cwd, dependencies, briefNames, sourceFile, printer, filename) {
|
||||
const { statements } = sourceFile;
|
||||
const features = [];
|
||||
// 所有的import
|
||||
const importStatements = [];
|
||||
// 如果有oak-general-business,需要AccessConfiguration,自动注入
|
||||
if (dependencies.includes('oak-general-business')) {
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier("accessConfiguration"), undefined), factory.createStringLiteral("@project/configuration/access"), undefined));
|
||||
}
|
||||
dependencies.forEach((dep, idx) => {
|
||||
const depDir = join(cwd, 'node_modules', dep);
|
||||
if (!(0, fs_1.existsSync)(depDir)) {
|
||||
throw new Error(`依赖模块${dep}未能找到相应的安装目录【${depDir}】`);
|
||||
}
|
||||
const esDir = join(depDir, 'es');
|
||||
const libDir = join(depDir, 'lib');
|
||||
const esDirExisted = (0, fs_1.existsSync)(esDir);
|
||||
const libDirExisted = (0, fs_1.existsSync)(libDir);
|
||||
if (!esDirExisted && !libDirExisted) {
|
||||
throw new Error(`依赖模块${dep}中没有es或者lib目录`);
|
||||
}
|
||||
const destDir = esDirExisted ? esDir : libDir;
|
||||
const destDirName = esDirExisted ? 'es' : 'lib';
|
||||
// features
|
||||
if ((0, fs_1.existsSync)(join(destDir, 'features'))) {
|
||||
importStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("initialize"), factory.createIdentifier(`initialize${(0, string_1.firstLetterUpperCase)(briefNames[idx])}Features`)),
|
||||
factory.createImportSpecifier(false, factory.createIdentifier("FeatureDict"), factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(briefNames[idx])}FeatureDict`))
|
||||
])), factory.createStringLiteral(join(dep, destDirName, 'features')), undefined));
|
||||
features.push(briefNames[idx]);
|
||||
}
|
||||
});
|
||||
const funcStmt = statements.find((stmt) => ts.isFunctionDeclaration(stmt) && stmt.modifiers?.find(modifier => modifier.kind === ts.SyntaxKind.ExportKeyword) && stmt.modifiers.find(modifier => modifier.kind === ts.SyntaxKind.DefaultKeyword));
|
||||
(0, assert_1.default)(funcStmt);
|
||||
const idx = statements.indexOf(funcStmt);
|
||||
const statements2 = [
|
||||
...statements.slice(0, idx),
|
||||
...importStatements,
|
||||
...statements.slice(idx)
|
||||
];
|
||||
if (features.length > 0) {
|
||||
(0, assert_1.default)(funcStmt.parameters.length === 1);
|
||||
const [param] = funcStmt.parameters;
|
||||
const { type } = param;
|
||||
(0, assert_1.default)(ts.isIntersectionTypeNode(type));
|
||||
Object.assign(type, {
|
||||
types: type.types.concat(features.map(ele => factory.createTypeReferenceNode(`${(0, string_1.firstLetterUpperCase)(ele)}FeatureDict`, [
|
||||
factory.createTypeReferenceNode('EntityDict')
|
||||
])))
|
||||
});
|
||||
Object.assign(funcStmt.body, {
|
||||
statements: [
|
||||
...features.map((ele, idx) => {
|
||||
const args = [
|
||||
factory.createIdentifier("features"),
|
||||
];
|
||||
if (dependencies[idx] === 'oak-general-business') {
|
||||
args.push(factory.createIdentifier("accessConfiguration"));
|
||||
}
|
||||
return factory.createExpressionStatement(factory.createAwaitExpression(factory.createCallExpression(factory.createIdentifier(`initialize${(0, string_1.firstLetterUpperCase)(ele)}Features`), undefined, args)));
|
||||
}).flat(),
|
||||
...funcStmt.body.statements,
|
||||
]
|
||||
});
|
||||
}
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements2), sourceFile);
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
console.log(`构建${filename}文件成功`);
|
||||
}
|
||||
function injectDataIndexFile(dataIndexFile, briefNames, printer) {
const sourceFile = ts.createSourceFile('index.ts', (0, fs_1.readFileSync)(dataIndexFile, 'utf-8'), ts.ScriptTarget.Latest, false, ts.ScriptKind.TS);
const { statements } = sourceFile;
const importStatements = briefNames.map((ele) => factory.createImportDeclaration(undefined, factory.createImportClause(false, factory.createIdentifier(`${ele}Data`), undefined), factory.createStringLiteral(`./${ele}Data`), undefined));
/**
 * 在文件末尾的这个位置上注入引用
export default {
relation: relations,
actionAuth,
relationAuth,
path,
i18n,
};
 */
const exportStmt = statements[statements.length - 1];
(0, assert_1.default)(ts.isExportAssignment(exportStmt));
const { expression } = exportStmt;
(0, assert_1.default)(ts.isObjectLiteralExpression(expression));
const { properties } = expression;
Object.assign(expression, {
properties: [
...properties,
...briefNames.map((ele) => factory.createSpreadAssignment(factory.createIdentifier(`${ele}Data`)))
]
});
const statements2 = [
...importStatements,
...statements,
];
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements2), sourceFile);
(0, fs_1.writeFileSync)(dataIndexFile, result, { flag: 'w' });
console.log(`注入${dataIndexFile}文件成功,共注入了${briefNames.length}个初始化数据引用`);
}
/**
 * 尝试将pages目录下的页面移到项目目录中。
 * 目前简化处理,假设目录结构都是pages/namespace/entity结构,以entity目录作为单元,如果有就放弃,没有就移植
 * @param cwdPageDir
 * @param modulePageDir
 */
function tryCopyPages(cwdPageDir, modulePageDir) {
// 各个namespace处理
const nss = (0, fs_1.readdirSync)(modulePageDir);
nss.forEach((namespace) => {
const pages = (0, fs_1.readdirSync)(join(modulePageDir, namespace));
pages.forEach((page) => {
const destDir = join(cwdPageDir, namespace, page);
if (!(0, fs_1.existsSync)(destDir)) {
(0, fs_extra_1.mkdirSync)(destDir);
const srcDir = join(modulePageDir, namespace, page);
console.log(`拷贝${srcDir}到${destDir}下`);
(0, fs_extra_1.copySync)(srcDir, destDir, {
recursive: true,
});
}
});
});
}
/**
 * 对各个依赖项目,可能有些文件需要被移植到项目目录下,逐步完善
 * @param cwd
 * @param dependencies
 * @param briefNames
 */
function tryCopyModuleTemplateFiles(cwd, dependencies, briefNames, printer) {
const injectDataIndexFileDependencies = [];
const injectDataIndexFileBriefNames = [];
dependencies.forEach((dep, idx) => {
const moduleDir = join(cwd, 'node_modules', dep);
const moduleTemplateDir = join(moduleDir, 'template');
if ((0, fs_1.existsSync)(moduleTemplateDir)) {
const entitiesDir = join(moduleTemplateDir, 'entities');
if ((0, fs_1.existsSync)(entitiesDir)) {
// entities目录下的定义直接拷贝过去(如果尚不存在的话)
const prjEntitiesDir = join(cwd, 'src', 'entities');
const result = (0, fs_1.readdirSync)(entitiesDir);
result.forEach((filename) => {
if (!(0, fs_1.existsSync)(join(prjEntitiesDir, filename))) {
console.log(`拷贝${join(entitiesDir, filename)}到${prjEntitiesDir}目录下`);
(0, fs_extra_1.copySync)(join(entitiesDir, filename), join(prjEntitiesDir, filename));
}
});
}
// data.ts中规定的初始化数据,拷贝到data目录下,并注入到data/index.ts
const dataFile = join(moduleTemplateDir, 'data.ts');
if ((0, fs_1.existsSync)(dataFile)) {
const prjDataFile = join(cwd, 'src', 'data', `${briefNames[idx]}Data.ts`);
if (!(0, fs_1.existsSync)(prjDataFile)) {
console.log(`拷贝${dataFile}到${prjDataFile}中`);
(0, fs_extra_1.copySync)(dataFile, prjDataFile);
injectDataIndexFileDependencies.push(dep);
injectDataIndexFileBriefNames.push(briefNames[idx]);
}
}
// pages中设计的页面,拷贝到pages对应的目录下,考虑namespace
const pageDir = join(moduleTemplateDir, 'pages');
if ((0, fs_1.existsSync)(pageDir)) {
tryCopyPages(join(cwd, 'src', 'pages'), pageDir);
}
}
});
if (injectDataIndexFileBriefNames.length > 0) {
injectDataIndexFile(join(cwd, 'src', 'data', 'index.ts'), injectDataIndexFileBriefNames, printer);
}
}
/**
 * 本函数用于构建src/initialize.dev, src/initialize.prod, src/initializeFeatures, src/context/FrontendContext, src/contextBackendContext
 * 这些和dependency相关的项目文件
 */
function buildDependency(rebuild) {
const cwd = process.cwd();
const depConfigFile = join(cwd, 'src', 'configuration', 'dependency.ts');
if (!(0, fs_1.existsSync)(depConfigFile)) {
console.error(`${depConfigFile}不存在,无法构建启动文件`);
}
const depGraph = analyzeDepedency(cwd);
// 依赖如果是树形关系,应当从底层的被依赖者开始初始化
const dependencies = depGraph.ascOrder;
const briefNames = dependencies.map((dep, idx) => `${dep.split('-').map(ele => ele[0]).join('')}${idx}`);
const templateFileList = [
join(cwd, 'node_modules', env_1.OAK_CLI_MODULE_NAME, 'templateFiles', 'initialize.dev.ts'),
join(cwd, 'node_modules', env_1.OAK_CLI_MODULE_NAME, 'templateFiles', 'initialize.prod.ts'),
join(cwd, 'node_modules', env_1.OAK_CLI_MODULE_NAME, 'templateFiles', 'initializeFeatures.ts'),
join(cwd, 'node_modules', env_1.OAK_CLI_MODULE_NAME, 'templateFiles', 'RuntimeCxt.ts'),
join(cwd, 'node_modules', env_1.OAK_CLI_MODULE_NAME, 'templateFiles', 'DependentExceptions.ts'),
join(cwd, 'node_modules', env_1.OAK_CLI_MODULE_NAME, 'templateFiles', 'polyfill.d.ts')
];
const program = ts.createProgram(templateFileList, {});
const printer = ts.createPrinter({ newLine: ts.NewLineKind.LineFeed });
const initDevFile = join(cwd, 'src', 'initialize.dev.ts');
if ((0, fs_1.existsSync)(initDevFile) && !rebuild) {
console.log(`[${initDevFile}]文件已经存在,无需构建启动文件`);
}
else {
outputIntializeDev(cwd, dependencies, briefNames, program.getSourceFile(templateFileList[0]), printer, initDevFile);
}
const initProdFile = join(cwd, 'src', 'initialize.prod.ts');
if ((0, fs_1.existsSync)(initProdFile) && !rebuild) {
console.log(`[${initProdFile}]文件已经存在,无需构建启动文件`);
}
else {
outputIntializeProd(cwd, dependencies, briefNames, program.getSourceFile(templateFileList[1]), printer, initProdFile);
}
const initFeaturesFile = join(cwd, 'src', 'initializeFeatures.ts');
if ((0, fs_1.existsSync)(initFeaturesFile) && !rebuild) {
console.log(`[${initFeaturesFile}]文件已经存在,无需构建启动文件`);
}
else {
outputIntializeFeatures(cwd, dependencies, briefNames, program.getSourceFile(templateFileList[2]), printer, initFeaturesFile);
}
const dependentContextFile = join(cwd, 'src', 'context', 'DependentContext.ts');
if ((0, fs_1.existsSync)(dependentContextFile) && !rebuild) {
console.log(`[${dependentContextFile}]文件已经存在,无需构建启动文件`);
}
else {
outputDependentContext(depGraph, printer, dependentContextFile);
}
const runtimeCxtFile = join(cwd, 'src', 'types', 'RuntimeCxt.ts');
if ((0, fs_1.existsSync)(runtimeCxtFile) && !rebuild) {
console.log(`[${runtimeCxtFile}]文件已经存在,无需构建启动文件`);
}
else {
outputRuntimeCxt(dependencies, briefNames, program.getSourceFile(templateFileList[3]), printer, runtimeCxtFile);
}
const dependentExceptionsFile = join(cwd, 'src', 'types', 'DependentExceptions.ts');
if ((0, fs_1.existsSync)(dependentExceptionsFile) && !rebuild) {
console.log(`[${dependentExceptionsFile}]文件已经存在,无需构建启动文件`);
}
else {
outputDependentExceptions(dependencies, briefNames, program.getSourceFile(templateFileList[4]), printer, dependentExceptionsFile);
}
const polyfillDtsFile = join(cwd, 'typings', 'polyfill.d.ts');
if ((0, fs_1.existsSync)(polyfillDtsFile) && !rebuild) {
console.log(`[${polyfillDtsFile}]文件已经存在,无需构建启动文件`);
}
else {
outputPolyfillDts(dependencies, briefNames, program.getSourceFile(templateFileList[5]), printer, polyfillDtsFile);
}
// 把各个依赖项目的一些初始化的文件拷贝过去
tryCopyModuleTemplateFiles(cwd, dependencies, briefNames, printer);
}
exports.default = buildDependency;
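// Worked example (illustrative, not part of this diff): for a dependency named
// 'oak-general-business' at index 0, buildDependency derives the brief name
// 'ogb0' (the initials of the '-'-separated segments plus the index), which then
// appears in the generated identifiers such as Ogb0FeatureDict / createOgb0Features.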
@@ -9,5 +9,5 @@ export declare const STRING_LITERAL_MAX_LENGTH = 24;
export declare const NUMERICAL_LITERL_DEFAULT_PRECISION = 8;
export declare const NUMERICAL_LITERL_DEFAULT_SCALE = 2;
export declare const INT_LITERL_DEFAULT_WIDTH = 4;
export declare const OAK_EXTERNAL_LIBS_FILEPATH: (path: string) => string;
export * from './entities';
export declare const OAK_CLI_MODULE_NAME = "@xuchangzju/oak-cli";

@@ -1,8 +1,7 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OAK_EXTERNAL_LIBS_FILEPATH = exports.INT_LITERL_DEFAULT_WIDTH = exports.NUMERICAL_LITERL_DEFAULT_SCALE = exports.NUMERICAL_LITERL_DEFAULT_PRECISION = exports.STRING_LITERAL_MAX_LENGTH = exports.ENTITY_NAME_MAX_LENGTH = exports.ACTION_CONSTANT_IN_OAK_DOMAIN = exports.TYPE_PATH_IN_OAK_DOMAIN = exports.ENTITY_PATH_IN_OAK_DOMAIN = exports.ENTITY_PATH_IN_OAK_GENERAL_BUSINESS = exports.LIB_PATH = exports.LIB_OAK_DOMAIN = void 0;
exports.OAK_CLI_MODULE_NAME = exports.INT_LITERL_DEFAULT_WIDTH = exports.NUMERICAL_LITERL_DEFAULT_SCALE = exports.NUMERICAL_LITERL_DEFAULT_PRECISION = exports.STRING_LITERAL_MAX_LENGTH = exports.ENTITY_NAME_MAX_LENGTH = exports.ACTION_CONSTANT_IN_OAK_DOMAIN = exports.TYPE_PATH_IN_OAK_DOMAIN = exports.ENTITY_PATH_IN_OAK_DOMAIN = exports.ENTITY_PATH_IN_OAK_GENERAL_BUSINESS = exports.LIB_PATH = exports.LIB_OAK_DOMAIN = void 0;
const tslib_1 = require("tslib");
const path_1 = tslib_1.__importDefault(require("path"));
exports.LIB_OAK_DOMAIN = 'oak-domain';
const LIB_OAK_GENERAL_BUSINESS = 'oak-general-business';
const LIB_PATH = () => 'lib';

@@ -36,10 +35,5 @@ exports.STRING_LITERAL_MAX_LENGTH = 24;
exports.NUMERICAL_LITERL_DEFAULT_PRECISION = 8;
exports.NUMERICAL_LITERL_DEFAULT_SCALE = 2;
exports.INT_LITERL_DEFAULT_WIDTH = 4;
// 暂放在这儿
// 项目依赖的第三方oak lib配置文件所在的固定路径
const OAK_EXTERNAL_LIBS_FILEPATH = (path) => {
return path_1.default.join(path, 'config/oakExternalLib.json');
};
exports.OAK_EXTERNAL_LIBS_FILEPATH = OAK_EXTERNAL_LIBS_FILEPATH;
tslib_1.__exportStar(require("./entities"), exports);
exports.OAK_CLI_MODULE_NAME = '@xuchangzju/oak-cli';
@@ -7,8 +7,8 @@ const { factory } = ts;
const path_1 = require("path");
const crypto_1 = require("crypto");
const fs_1 = tslib_1.__importDefault(require("fs"));
const env_1 = require("./env");
const string_1 = require("../utils/string");
const dependencyBuilder_1 = require("./dependencyBuilder");
/**
 * 将一个object展开编译为一棵语法树,只有string和object两种键值对象
 * @param data

@@ -36,9 +36,10 @@ class LocaleBuilder {
const pwd = process.cwd();
this.pwd = pwd;
this.asLib = !!asLib;
const dependencyFile = (0, env_1.OAK_EXTERNAL_LIBS_FILEPATH)((0, path_1.join)(pwd, 'src'));
if (fs_1.default.existsSync(dependencyFile)) {
this.dependencies = require(dependencyFile);
const dependencyConfigureFile = (0, path_1.join)(pwd, 'src', 'configuration', 'dependency.ts');
if (fs_1.default.existsSync(dependencyConfigureFile)) {
const depGraph = (0, dependencyBuilder_1.analyzeDepedency)(pwd);
this.dependencies = depGraph.ascOrder;
}
else {
this.dependencies = [];

@@ -132,9 +133,14 @@ class LocaleBuilder {
this.locales[ns] = [module, position.replace(/\\/g, '/'), language, data];
if (watch) {
fs_1.default.watch(filepath, () => {
const data = this.readLocaleFileContent(filepath);
this.locales[ns] = [module, position.replace(/\\/g, '/'), language, data];
this.outputDataFile();
try {
const data = this.readLocaleFileContent(filepath);
this.locales[ns] = [module, position.replace(/\\/g, '/'), language, data];
this.outputDataFile();
}
catch (err) {
// 啥都不干
}
});
}
}
@@ -7,6 +7,7 @@ const fs_extra_1 = require("fs-extra");
const assert_1 = tslib_1.__importDefault(require("assert"));
const ts = tslib_1.__importStar(require("typescript"));
const node_watch_1 = tslib_1.__importDefault(require("node-watch"));
const env_1 = require("./env");
const { factory } = ts;
const NameSpaceDescDict = {};
function checkPageDir(dir, relativePath, ns, type) {

@@ -160,46 +161,34 @@ function judgeUseOakRouterBuilder(statements) {
return ts.isExpressionStatement(stmt) && ts.isStringLiteral(stmt.expression) && stmt.expression.text === 'use oak router builder';
}
function outputInWebAppDir(appDir) {
const routerFileName = (0, path_1.join)(appDir, 'router', 'allRouters.ts');
const templateFileName = (0, path_1.join)(appDir, 'router', 'allRoutersTemplate.ts');
if ((0, fs_extra_1.existsSync)(templateFileName)) {
const program = ts.createProgram([templateFileName], {
removeComments: false,
});
const routerFile = program.getSourceFile(templateFileName);
(0, assert_1.default)(routerFile);
const namespaceDir = (0, path_1.join)(appDir, 'namespaces');
const { statements } = routerFile;
if (judgeUseOakRouterBuilder(statements)) {
statements.forEach((statement) => {
if (ts.isVariableStatement(statement)) {
const declaration = statement.declarationList.declarations.find(declaration => ts.isIdentifier(declaration.name) && declaration.name.text === 'allRouters');
if (declaration) {
Object.assign(declaration, {
initializer: makeWebAllRouters(namespaceDir, (0, path_1.join)(appDir, '../../../..'), (0, path_1.dirname)(templateFileName))
});
}
const routerFileName = (0, path_1.join)(appDir, 'routers', 'allRouters.ts');
const templateFileName = (0, path_1.join)(appDir, '../../..', 'node_modules', env_1.OAK_CLI_MODULE_NAME, 'templateFiles', 'allRouters.ts');
const program = ts.createProgram([templateFileName], {
removeComments: false,
});
const routerFile = program.getSourceFile(templateFileName);
(0, assert_1.default)(routerFile);
const namespaceDir = (0, path_1.join)(appDir, 'namespaces');
const { statements } = routerFile;
if (judgeUseOakRouterBuilder(statements)) {
statements.forEach((statement) => {
if (ts.isVariableStatement(statement)) {
const declaration = statement.declarationList.declarations.find(declaration => ts.isIdentifier(declaration.name) && declaration.name.text === 'allRouters');
if (declaration) {
Object.assign(declaration, {
initializer: makeWebAllRouters(namespaceDir, (0, path_1.join)(appDir, '../../..'), (0, path_1.dirname)(routerFileName))
});
}
});
const printer = ts.createPrinter({ newLine: ts.NewLineKind.LineFeed, removeComments: false });
const result = printer.printNode(ts.EmitHint.Unspecified, routerFile, routerFile);
(0, fs_extra_1.writeFileSync)(routerFileName, result);
}
}
else {
console.warn(`${appDir}的目录结构未按照标准建立,缺少了${templateFileName},请从模板中补充`);
}
});
const printer = ts.createPrinter({ newLine: ts.NewLineKind.LineFeed, removeComments: false });
const result = printer.printNode(ts.EmitHint.Unspecified, routerFile, routerFile);
(0, fs_extra_1.writeFileSync)(routerFileName, result);
}
}
function outputInWebDir(dir) {
const srcAppDir = (0, path_1.join)(dir, 'src', 'app');
const apps = (0, fs_extra_1.readdirSync)(srcAppDir);
apps.forEach((app) => {
const appDir = (0, path_1.join)(srcAppDir, app);
const stat = (0, fs_extra_1.statSync)(appDir);
if (stat.isDirectory()) {
outputInWebAppDir(appDir);
}
});
outputInWebAppDir(srcAppDir);
}
function watchDir(projectDir, startupDir, type) {
const srcPageDir = (0, path_1.join)(projectDir, 'src', 'pages');
@ -27,13 +27,24 @@ const ActionImportStatements = () => [
|
|||
factory.createImportSpecifier(false, undefined, factory.createIdentifier("RelationAction")),
|
||||
])), factory.createStringLiteral((0, env_1.ACTION_CONSTANT_IN_OAK_DOMAIN)()), undefined)
|
||||
];
|
||||
const StyleAsts = {};
|
||||
const ActionAsts = {};
|
||||
const SchemaAsts = {};
|
||||
/**
|
||||
* 部分项目目前存在引用了Schema但是不依赖于其包,因此目前先在此去重。
|
||||
* 后续要修正这种行为,让继承的schema分层编译
|
||||
* @param many
|
||||
* @param one
|
||||
* @param key
|
||||
* @param notNull
|
||||
*/
|
||||
function addRelationship(many, one, key, notNull) {
|
||||
const { [many]: manySet } = ManyToOne;
|
||||
const one2 = one === 'Schema' ? many : one;
|
||||
if (manySet) {
|
||||
manySet.push([one2, key, notNull]);
|
||||
if (!manySet.find(ele => ele[1] === key && ele[0] === one2)) {
|
||||
manySet.push([one2, key, notNull]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
(0, lodash_1.assign)(ManyToOne, {
|
||||
|
|
@ -42,7 +53,9 @@ function addRelationship(many, one, key, notNull) {
|
|||
}
|
||||
const { [one2]: oneSet } = OneToMany;
|
||||
if (oneSet) {
|
||||
oneSet.push([many, key, notNull]);
|
||||
if (!oneSet.find(ele => ele[1] === key && ele[0] === many)) {
|
||||
oneSet.push([many, key, notNull]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
(0, lodash_1.assign)(OneToMany, {
|
||||
|
|
@ -675,7 +688,9 @@ function analyzeSchemaDefinition(node, moduleName, filename, path, program, refe
|
|||
// 这也是一对多的反指定义
|
||||
const reverseEntity = typeName.text;
|
||||
if (ReversePointerRelations[reverseEntity]) {
|
||||
ReversePointerRelations[reverseEntity].push(moduleName);
|
||||
if (!ReversePointerRelations[reverseEntity].includes(moduleName)) {
|
||||
ReversePointerRelations[reverseEntity].push(moduleName);
|
||||
}
|
||||
}
|
||||
else {
|
||||
(0, lodash_1.assign)(ReversePointerRelations, {
|
||||
|
|
@ -795,6 +810,7 @@ function analyzeEntity(filename, path, program, relativePath) {
|
|||
if (Schema.hasOwnProperty(moduleName)) {
|
||||
delete ActionAsts[moduleName];
|
||||
delete SchemaAsts[moduleName];
|
||||
delete StyleAsts[moduleName];
|
||||
// removeFromRelationShip(moduleName);
|
||||
console.warn(`出现了同名的Entity定义「${moduleName}」,将使用${fullPath}取代掉默认对象,请检查新的对象结构及相关常量定义与原有的兼容,否则原有对象的相关逻辑会出现不可知异常`);
|
||||
}
|
||||
|
|
@ -1120,6 +1136,9 @@ function analyzeEntity(filename, path, program, relativePath) {
|
|||
_static = true; // static如果有值只能为true
|
||||
}
|
||||
};
|
||||
const dealWithStyleDesc = (declaration) => {
|
||||
StyleAsts[moduleName] = declaration;
|
||||
};
|
||||
const dealWithEntityDesc = (declaration) => {
|
||||
if (ts.isObjectLiteralExpression(declaration)) {
|
||||
const { properties } = declaration;
|
||||
|
|
@ -1136,6 +1155,11 @@ function analyzeEntity(filename, path, program, relativePath) {
|
|||
(0, assert_1.default)(ts.isPropertyAssignment(configurationProperty));
|
||||
dealWithConfiguration(configurationProperty.initializer);
|
||||
}
|
||||
const styleDescProperty = properties.find(ele => ts.isPropertyAssignment(ele) && ts.isIdentifier(ele.name) && ele.name.text === 'style');
|
||||
if (styleDescProperty) {
|
||||
(0, assert_1.default)(ts.isPropertyAssignment(styleDescProperty));
|
||||
dealWithStyleDesc(styleDescProperty.initializer);
|
||||
}
|
||||
}
|
||||
else if (ts.isIdentifier(declaration)) {
|
||||
const checker = program.getTypeChecker();
|
||||
|
|
@ -1146,7 +1170,6 @@ function analyzeEntity(filename, path, program, relativePath) {
|
|||
* 拿不到数据定义(在js中)(original.declaration.initializer是undefined)
|
||||
*/
|
||||
(0, assert_1.default)(false, '用变量赋值给entityDesc暂时还解析不了');
|
||||
console.log(original);
|
||||
}
|
||||
};
|
||||
declarations.forEach((declaration) => {
|
||||
|
|
@ -3206,7 +3229,7 @@ function outputAction(outputDir, printer) {
|
|||
for (const external in fromExternalImports) {
|
||||
statements.splice(0, 0, factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports(fromExternalImports[external].map(ele => factory.createImportSpecifier(false, ele[1] === undefined ? undefined : factory.createIdentifier(ele[1]), factory.createIdentifier(ele[0]))))), factory.createStringLiteral(external), undefined));
|
||||
}
|
||||
statements.push(factory.createVariableStatement([factory.createModifier(ts.SyntaxKind.ExportKeyword)], factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier("ActionDefDict"), undefined, undefined, factory.createObjectLiteralExpression(actionDefNames.map(ele => factory.createPropertyAssignment(factory.createIdentifier(`${ele}State`), factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(ele)}ActionDef`))), true))], ts.NodeFlags.Const)));
|
||||
statements.push(factory.createVariableStatement([factory.createModifier(ts.SyntaxKind.ExportKeyword)], factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier("actionDefDict"), undefined, undefined, factory.createObjectLiteralExpression(actionDefNames.map(ele => factory.createPropertyAssignment(factory.createIdentifier(`${ele}State`), factory.createIdentifier(`${(0, string_1.firstLetterUpperCase)(ele)}ActionDef`))), true))], ts.NodeFlags.Const)));
|
||||
/* const result = printer.printNode(
|
||||
ts.EmitHint.Unspecified,
|
||||
factory.createSourceFile(statements,
|
||||
|
|
@ -3218,10 +3241,10 @@ function outputAction(outputDir, printer) {
|
|||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(importStatements.concat(statements)), sourceFile);
|
||||
const filename = path_1.default.join(outputDir, entity, 'Action.ts');
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
actionDictStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, factory.createIdentifier("ActionDefDict"), factory.createIdentifier(entity))])), factory.createStringLiteral(`./${entity}/Action`)));
|
||||
propertyAssignments.push(factory.createPropertyAssignment(factory.createIdentifier((0, string_1.firstLetterLowerCase)(entity)), factory.createIdentifier(entity)));
|
||||
actionDictStatements.push(factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, factory.createIdentifier("actionDefDict"), factory.createIdentifier((0, string_1.firstLetterLowerCase)(entity)))])), factory.createStringLiteral(`./${entity}/Action`)));
|
||||
propertyAssignments.push(factory.createShorthandPropertyAssignment(factory.createIdentifier((0, string_1.firstLetterLowerCase)(entity))));
|
||||
}
|
||||
actionDictStatements.push(factory.createVariableStatement([factory.createModifier(ts.SyntaxKind.ExportKeyword)], factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier("ActionDefDict"), undefined, undefined, factory.createObjectLiteralExpression(propertyAssignments, true))], ts.NodeFlags.Const)));
|
||||
actionDictStatements.push(factory.createVariableStatement([factory.createModifier(ts.SyntaxKind.ExportKeyword)], factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier("actionDefDict"), undefined, undefined, factory.createObjectLiteralExpression(propertyAssignments, true))], ts.NodeFlags.Const)));
|
||||
const resultFile = ts.createSourceFile("someFileName.ts", "", ts.ScriptTarget.Latest, /*setParentNodes*/ false, ts.ScriptKind.TS);
|
||||
const result = printer.printNode(ts.EmitHint.Unspecified, factory.createSourceFile(actionDictStatements, factory.createToken(ts.SyntaxKind.EndOfFileToken), ts.NodeFlags.None), resultFile);
|
||||
const fileName = path_1.default.join(outputDir, 'ActionDefDict.ts');
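// A rough sketch (not part of the diff) of the ActionDefDict.ts aggregate that the block above
// emits after this change: every entity's lower-cased `actionDefDict` export is imported under
// the lower-cased entity name and re-exported with shorthand property assignments.
// The entity names Book and Order are hypothetical.
import { actionDefDict as book } from './Book/Action';
import { actionDefDict as order } from './Order/Action';
export const actionDefDict = {
    book,
    order,
};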
@ -3577,6 +3600,7 @@ function outputIndexTs(outputDir) {
|
|||
export * from './Storage';
|
||||
export * from './ActionDefDict';
|
||||
export * from './Relation';
|
||||
export * from './StyleDict';
|
||||
`;
|
||||
const filename = path_1.default.join(outputDir, 'index.ts');
|
||||
(0, fs_1.writeFileSync)(filename, indexTs, { flag: 'w' });
|
||||
|
|
@ -4132,6 +4156,40 @@ function outputRelation2(outputDir, printer) {
|
|||
const filename = path_1.default.join(outputDir, 'Relation.ts');
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
}
|
||||
/**
|
||||
* 输出oak-app-domain中的StyleDict.ts文件
|
||||
* @param outputDir
|
||||
* @param printer
|
||||
*/
|
||||
function outputStyleDict(outputDir, printer) {
|
||||
for (const entity in StyleAsts) {
|
||||
const stmts = [
|
||||
factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([
|
||||
factory.createImportSpecifier(false, undefined, factory.createIdentifier("EntityDef"))
|
||||
])), factory.createStringLiteral("./Schema"), undefined),
|
||||
factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, undefined, factory.createIdentifier("StyleDef"))])), factory.createStringLiteral(`${(0, env_1.TYPE_PATH_IN_OAK_DOMAIN)()}Style`), undefined),
|
||||
factory.createVariableStatement([factory.createToken(ts.SyntaxKind.ExportKeyword)], factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier("style"), undefined, factory.createTypeReferenceNode(factory.createIdentifier("StyleDef"), [
|
||||
factory.createIndexedAccessTypeNode(factory.createTypeReferenceNode(factory.createIdentifier("EntityDef"), undefined), factory.createLiteralTypeNode(factory.createStringLiteral("OpSchema"))),
|
||||
factory.createIndexedAccessTypeNode(factory.createTypeReferenceNode(factory.createIdentifier("EntityDef"), undefined), factory.createLiteralTypeNode(factory.createStringLiteral("Action")))
|
||||
]), StyleAsts[entity])], ts.NodeFlags.Const))
|
||||
];
|
||||
const { sourceFile } = Schema[entity];
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(stmts), sourceFile);
|
||||
const filename = path_1.default.join(outputDir, entity, 'Style.ts');
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
}
|
||||
const stmts = [
|
||||
factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([
|
||||
factory.createImportSpecifier(false, undefined, factory.createIdentifier("EntityDict"))
|
||||
])), factory.createStringLiteral("./EntityDict"), undefined),
|
||||
factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, undefined, factory.createIdentifier("StyleDict"))])), factory.createStringLiteral(`${(0, env_1.TYPE_PATH_IN_OAK_DOMAIN)(1)}Style`), undefined),
|
||||
...Object.keys(StyleAsts).map((entity) => factory.createImportDeclaration(undefined, factory.createImportClause(false, undefined, factory.createNamedImports([factory.createImportSpecifier(false, factory.createIdentifier("style"), factory.createIdentifier((0, string_1.firstLetterLowerCase)(entity)))])), factory.createStringLiteral(`./${entity}/Style`), undefined)),
|
||||
factory.createVariableStatement([factory.createToken(ts.SyntaxKind.ExportKeyword)], factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier("styleDict"), undefined, factory.createTypeReferenceNode(factory.createIdentifier("StyleDict"), [factory.createTypeReferenceNode(factory.createIdentifier("EntityDict"), undefined)]), factory.createObjectLiteralExpression(Object.keys(StyleAsts).map((entity) => factory.createShorthandPropertyAssignment(factory.createIdentifier((0, string_1.firstLetterLowerCase)(entity)), undefined)), true))], ts.NodeFlags.Const))
|
||||
];
|
||||
const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(stmts), ts.createSourceFile("someFileName.ts", "", ts.ScriptTarget.Latest, /*setParentNodes*/ false, ts.ScriptKind.TS));
|
||||
const filename = path_1.default.join(outputDir, 'StyleDict.ts');
|
||||
(0, fs_1.writeFileSync)(filename, result, { flag: 'w' });
|
||||
}
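// A rough sketch (not part of the diff) of the StyleDict.ts file that outputStyleDict emits,
// assuming two hypothetical entities Book and Order declare a style in their entityDesc.
// The oak-domain import path is abbreviated; the real one is produced by TYPE_PATH_IN_OAK_DOMAIN.
import { EntityDict } from './EntityDict';
import { StyleDict } from 'oak-domain/lib/types/Style';
import { style as book } from './Book/Style';
import { style as order } from './Order/Style';
export const styleDict: StyleDict<EntityDict> = {
    book,
    order,
};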
function analyzeEntities(inputDir, relativePath) {
|
||||
const files = (0, fs_1.readdirSync)(inputDir);
|
||||
const fullFilenames = files.map(ele => {
|
||||
|
|
@ -4161,6 +4219,7 @@ function buildSchema(outputDir) {
|
|||
outputEntityDict(outputDir, printer);
|
||||
outputStorage(outputDir, printer);
|
||||
outputRelation2(outputDir, printer);
|
||||
outputStyleDict(outputDir, printer);
|
||||
outputIndexTs(outputDir);
|
||||
if (!process.env.COMPLING_AS_LIB) {
|
||||
outputPackageJson(outputDir);
|
||||
|
|
|
|||
|
|
@ -46,4 +46,17 @@ const entityDesc = {
|
|||
],
|
||||
},
|
||||
],
|
||||
style: {
|
||||
icon: {
|
||||
apply: '',
|
||||
abandon: '',
|
||||
},
|
||||
color: {
|
||||
iState: {
|
||||
active: '#0000FF',
|
||||
applied: '#008000',
|
||||
abandoned: '#A9A9A9',
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@ -29,4 +29,15 @@ exports.entityDesc = {
|
|||
}
|
||||
},
|
||||
},
|
||||
style: {
|
||||
icon: {
|
||||
mergeTo: '',
|
||||
},
|
||||
color: {
|
||||
userState: {
|
||||
normal: '#112233',
|
||||
merged: '#223344',
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { ActionDictOfEntityDict, Checker, EntityDict, StorageSchema, AttrUpdateMatrix } from "../types";
|
||||
import { ActionDefDict, Checker, EntityDict, StorageSchema, AttrUpdateMatrix } from "../types";
|
||||
import { SyncContext } from "./SyncRowStore";
|
||||
import { AsyncContext } from "./AsyncRowStore";
|
||||
import { EntityDict as BaseEntityDict } from '../base-app-domain/EntityDict';
|
||||
export declare function makeIntrinsicCheckers<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(schema: StorageSchema<ED>, actionDefDict: ActionDictOfEntityDict<ED>, attrUpdateMatrix?: AttrUpdateMatrix<ED>): Checker<ED, keyof ED, Cxt | FrontCxt>[];
|
||||
export declare function makeIntrinsicCheckers<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(schema: StorageSchema<ED>, actionDefDict: ActionDefDict<ED>, attrUpdateMatrix?: AttrUpdateMatrix<ED>): Checker<ED, keyof ED, Cxt | FrontCxt>[];
|
||||
|
|
|
|||
|
|
@ -1,12 +1,14 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.makeIntrinsicCheckers = void 0;
|
||||
const tslib_1 = require("tslib");
|
||||
const types_1 = require("../types");
|
||||
const lodash_1 = require("../utils/lodash");
|
||||
const filter_1 = require("./filter");
|
||||
const modi_1 = require("./modi");
|
||||
const checker_1 = require("./checker");
|
||||
const action_1 = require("../actions/action");
|
||||
const assert_1 = tslib_1.__importDefault(require("assert"));
|
||||
function checkUniqueBetweenRows(rows, uniqAttrs) {
|
||||
// 先检查这些行本身之间有无unique冲突
|
||||
const dict = {};
|
||||
|
|
@ -191,19 +193,21 @@ function createActionTransformerCheckers(actionDefDict) {
|
|||
// 这里用data类型的checker改数据了不太好,先这样
|
||||
checkers.push({
|
||||
action: action,
|
||||
type: 'logicalData',
|
||||
type: 'logical',
|
||||
entity,
|
||||
checker: (operation) => {
|
||||
const { data } = operation;
|
||||
if (data instanceof Array) {
|
||||
data.forEach((d) => Object.assign(d, {
|
||||
[attr]: stm[action][1],
|
||||
}));
|
||||
}
|
||||
else {
|
||||
Object.assign(data, {
|
||||
[attr]: stm[action][1],
|
||||
});
|
||||
if (data) {
|
||||
if (data instanceof Array) {
|
||||
data.forEach((d) => Object.assign(d, {
|
||||
[attr]: stm[action][1],
|
||||
}));
|
||||
}
|
||||
else {
|
||||
Object.assign(data, {
|
||||
[attr]: stm[action][1],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
@ -211,7 +215,7 @@ function createActionTransformerCheckers(actionDefDict) {
|
|||
if (is) {
|
||||
checkers.push({
|
||||
action: 'create',
|
||||
type: 'logicalData',
|
||||
type: 'logical',
|
||||
entity,
|
||||
priority: 10, // 优先级要高,先于真正的data检查进行
|
||||
checker: (operation) => {
|
||||
|
|
@ -225,7 +229,7 @@ function createActionTransformerCheckers(actionDefDict) {
|
|||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
else if (data) {
|
||||
if (!data[attr]) {
|
||||
Object.assign(data, {
|
||||
[attr]: is,
|
||||
|
|
@ -239,6 +243,60 @@ function createActionTransformerCheckers(actionDefDict) {
|
|||
}
|
||||
return checkers;
|
||||
}
|
||||
/**
|
||||
* 检查一次更新是否有关联通过的可能
|
||||
* 例如,更新A的条件是B = 1,此时行上的B并不等于1,但由于更新数据是: { B: 1, A: .. }
|
||||
* 此时如果B更新可以成功则A也可以成功
|
||||
* @param entity
|
||||
* @param data
|
||||
* @param filters
|
||||
* @param context
|
||||
*/
|
||||
function cascadelyCheckUpdateFilters(entity, schema, data, filter, matrix, restAttrs, context) {
|
||||
const successAttrs = (0, lodash_1.difference)(Object.keys(data), restAttrs);
|
||||
const successAttrFilter = (0, lodash_1.pick)(data, successAttrs);
|
||||
/**
|
||||
* 先找到能直接更新成功的属性
|
||||
*/
|
||||
const legalAttrResult = restAttrs.map((attr) => {
|
||||
const { filter: f } = matrix[attr];
|
||||
if (!f) {
|
||||
return true;
|
||||
}
|
||||
// 此时看应用了success的attributes更新后,能否消除掉f中的部分条件
|
||||
const result = (0, filter_1.analyzeFilterRelation)(entity, schema, successAttrFilter, f, true);
|
||||
if (typeof result === 'boolean') {
|
||||
return result;
|
||||
}
|
||||
const { sureAttributes } = result;
|
||||
const f2 = (0, lodash_1.omit)(f, sureAttributes);
|
||||
return (0, filter_1.checkFilterContains)(entity, context, f2, filter, true);
|
||||
});
|
||||
const checkResult1 = (lar) => {
|
||||
const legalAttrs = [];
|
||||
const illegalAttrs = [];
|
||||
(0, assert_1.default)(lar.length === restAttrs.length);
|
||||
lar.forEach((ele, idx) => {
|
||||
if (ele) {
|
||||
legalAttrs.push(restAttrs[idx]);
|
||||
}
|
||||
else {
|
||||
illegalAttrs.push(restAttrs[idx]);
|
||||
}
|
||||
});
|
||||
if (illegalAttrs.length === 0) {
|
||||
return;
|
||||
}
|
||||
if (legalAttrs.length === 0) {
|
||||
throw new types_1.OakAttrCantUpdateException(entity, illegalAttrs, '更新的行当前属性不满足约束,请仔细检查数据');
|
||||
}
|
||||
return cascadelyCheckUpdateFilters(entity, schema, data, filter, matrix, illegalAttrs, context);
|
||||
};
|
||||
if (legalAttrResult.find(ele => ele instanceof Promise)) {
|
||||
return Promise.all(legalAttrResult).then((lar) => checkResult1(lar));
|
||||
}
|
||||
return checkResult1(legalAttrResult);
|
||||
}
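// A hedged illustration (not part of the diff) of the scenario the comment above describes,
// using a hypothetical attrUpdateMatrix for an entity `book`: attribute `a` may only be
// updated while the row satisfies { b: 1 }, while `b` itself is unconstrained. An update of
// { b: 1, a: 'xxx' } against rows where b !== 1 first succeeds for `b`, and
// cascadelyCheckUpdateFilters then rechecks `a` with { b: 1 } treated as already applied.
const attrUpdateMatrix = {
    book: {
        a: {
            filter: { b: 1 },   // precondition on the row before `a` may be updated
        },
        b: {},                  // no filter, so `b` can always be updated
    },
};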
function createAttrUpdateCheckers(schema, attrUpdateMatrix) {
|
||||
const checkers = [];
|
||||
for (const entity in attrUpdateMatrix) {
|
||||
|
|
@ -268,19 +326,27 @@ function createAttrUpdateCheckers(schema, attrUpdateMatrix) {
|
|||
if (!a.includes(action)) {
|
||||
// 找到不满足的那个attr
|
||||
const attrsIllegal = attrs.filter((attr) => matrix[attr]?.actions && !matrix[attr]?.actions?.includes(action));
|
||||
throw new types_1.OakAttrCantUpdateException(entity, attrsIllegal, `${attrsIllegal}不允许被${action}动作更新`);
|
||||
throw new types_1.OakAttrCantUpdateException(entity, attrsIllegal, `${attrsIllegal.join(',')}不允许被${action}动作更新`);
|
||||
}
|
||||
}
|
||||
if (f) {
|
||||
const rr = (0, filter_1.contains)(entity, context.getSchema(), data, f);
|
||||
console.log(rr);
|
||||
const result = (0, filter_1.checkFilterContains)(entity, context, f, filter, true);
|
||||
if (result instanceof Promise) {
|
||||
return result.then((v) => {
|
||||
if (!v) {
|
||||
if (attrs.length > 1) {
|
||||
return cascadelyCheckUpdateFilters(entity, schema, data, filter, matrix, attrs, context);
|
||||
}
|
||||
throw new types_1.OakAttrCantUpdateException(entity, attrs, '更新的行当前属性不满足约束,请仔细检查数据');
|
||||
}
|
||||
});
|
||||
}
|
||||
if (!result) {
|
||||
if (attrs.length > 1) {
|
||||
return cascadelyCheckUpdateFilters(entity, schema, data, filter, matrix, attrs, context);
|
||||
}
|
||||
throw new types_1.OakAttrCantUpdateException(entity, attrs, '更新的行当前属性不满足约束,请仔细检查数据');
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
import { ActionDictOfEntityDict, Checker, EntityDict, StorageSchema, Trigger, Watcher, AttrUpdateMatrix } from "../types";
|
||||
import { ActionDefDict, Checker, EntityDict, StorageSchema, Trigger, Watcher, AttrUpdateMatrix } from "../types";
|
||||
import { SyncContext } from "./SyncRowStore";
|
||||
import { AsyncContext } from "./AsyncRowStore";
|
||||
import { EntityDict as BaseEntityDict } from '../base-app-domain/EntityDict';
|
||||
export declare function makeIntrinsicCTWs<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(schema: StorageSchema<ED>, actionDefDict: ActionDictOfEntityDict<ED>, attrUpdateMatrix?: AttrUpdateMatrix<ED>): {
|
||||
export declare function makeIntrinsicLogics<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(schema: StorageSchema<ED>, actionDefDict: ActionDefDict<ED>, attrUpdateMatrix?: AttrUpdateMatrix<ED>): {
|
||||
triggers: Trigger<ED, keyof ED, Cxt>[];
|
||||
checkers: Checker<ED, keyof ED, Cxt | FrontCxt>[];
|
||||
watchers: Watcher<ED, keyof ED, Cxt>[];
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.makeIntrinsicCTWs = void 0;
|
||||
exports.makeIntrinsicLogics = void 0;
|
||||
const actionAuth_1 = require("./actionAuth");
|
||||
const modi_1 = require("./modi");
|
||||
const IntrinsicCheckers_1 = require("./IntrinsicCheckers");
|
||||
|
|
@ -31,7 +31,7 @@ function createExpiredWatchers(schema) {
|
|||
}
|
||||
return watchers;
|
||||
}
|
||||
function makeIntrinsicCTWs(schema, actionDefDict, attrUpdateMatrix) {
|
||||
function makeIntrinsicLogics(schema, actionDefDict, attrUpdateMatrix) {
|
||||
const checkers = (0, IntrinsicCheckers_1.makeIntrinsicCheckers)(schema, actionDefDict, attrUpdateMatrix);
|
||||
const triggers = (0, modi_1.createModiRelatedTriggers)(schema);
|
||||
triggers.push(...actionAuth_1.triggers);
|
||||
|
|
@ -42,4 +42,4 @@ function makeIntrinsicCTWs(schema, actionDefDict, attrUpdateMatrix) {
|
|||
watchers,
|
||||
};
|
||||
}
|
||||
exports.makeIntrinsicCTWs = makeIntrinsicCTWs;
|
||||
exports.makeIntrinsicLogics = makeIntrinsicLogics;
|
||||
|
|
|
|||
|
|
@ -4,6 +4,14 @@ import { AsyncContext } from './AsyncRowStore';
|
|||
import { SyncContext } from './SyncRowStore';
|
||||
export declare function translateCreateDataToFilter<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(schema: StorageSchema<ED>, entity: T, data: ED[T]['CreateSingle']['data'], allowUnrecoganized: boolean): ED[T]["Selection"]["filter"];
|
||||
export declare function combineFilters<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(entity: T, schema: StorageSchema<ED>, filters: Array<ED[T]['Selection']['filter']>, union?: true): ED[T]["Selection"]["filter"] | undefined;
|
||||
type DeducedFilter<ED extends EntityDict & BaseEntityDict, T extends keyof ED> = {
|
||||
entity: T;
|
||||
filter: ED[T]['Selection']['filter'];
|
||||
};
|
||||
type DeducedFilterCombination<ED extends EntityDict & BaseEntityDict> = {
|
||||
$or?: (DeducedFilterCombination<ED> | DeducedFilter<ED, keyof ED>)[];
|
||||
$and?: (DeducedFilterCombination<ED> | DeducedFilter<ED, keyof ED>)[];
|
||||
};
|
||||
/**
|
||||
* 在以下判断相容或相斥的过程中,相容/相斥的事实标准是:满足两个条件的查询集合是否被包容/互斥,但如果两个filter在逻辑上相容或者相斥,在事实上不一定相容或者相斥
|
||||
* 例如:{ a: 1 } 和 { a: { $ne: 1 } } 是明显不相容的查询,但如果数据为空集,则这两个查询并不能否定其相容
|
||||
|
|
@ -35,6 +43,44 @@ export declare function combineFilters<ED extends EntityDict & BaseEntityDict, T
|
|||
* @attention: 1)这里的测试不够充分,有些算子之间的相容或相斥可能有遗漏, 2)有新的算子加入需要修改代码
|
||||
*/
|
||||
export declare function judgeValueRelation(value1: any, value2: any, contained: boolean): boolean | undefined;
|
||||
/**
|
||||
* 根据filter对compared查询的各个条件进行逐项分析
|
||||
* @param entity
|
||||
* @param schema
|
||||
* @param filter
|
||||
* @param compared
|
||||
* @param contained
|
||||
* @returns
|
||||
* sureAttributes中包含被判定肯定相容或肯定不相斥的属性(不用再继续判定了)
|
||||
* uncertainAttributes中包含的是无法判定结果的属性
|
||||
* totalAndDeducedFilters包含的是判定过程中推论的相容的充分条件(and关系)
|
||||
* totalOrDeducedFilters包含的是判定过程中推论的相斥的充分条件(or关系)
|
||||
*/
|
||||
export declare function analyzeFilterRelation<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(entity: T, schema: StorageSchema<ED>, filter: NonNullable<ED[T]['Selection']['filter']>, compared: NonNullable<ED[T]['Selection']['filter']>, contained: boolean): boolean | {
|
||||
totalAndDeducedFilters: (DeducedFilterCombination<ED> | DeducedFilter<ED, T>)[];
|
||||
totalOrDeducedFilters: (DeducedFilterCombination<ED> | DeducedFilter<ED, T>)[];
|
||||
uncertainAttributes: string[];
|
||||
sureAttributes: string[];
|
||||
};
|
||||
/**
|
||||
*
|
||||
* 判断filter是否包含contained中的查询条件,即filter查询的结果一定是contained查询结果的子集
|
||||
* filter = {
|
||||
* a: 1
|
||||
* b: 2,
|
||||
* c: 3,
|
||||
* },
|
||||
* conditionalFilter = {
|
||||
* a: 1
|
||||
* }
|
||||
* 则包含
|
||||
* @param entity
|
||||
* @param schema
|
||||
* @param filter
|
||||
* @param contained
|
||||
* @returns
|
||||
*/
|
||||
export declare function contains<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(entity: T, schema: StorageSchema<ED>, filter: ED[T]['Selection']['filter'], contained: ED[T]['Selection']['filter']): boolean | DeducedFilterCombination<ED>;
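// A hedged example (not part of the diff) of the containment described in the comment above;
// the entity name and the storageSchema variable are hypothetical.
const r = contains('book', storageSchema, { a: 1, b: 2, c: 3 }, { a: 1 });
// r should be true here: every row matching the left filter also matches { a: 1 }. When the
// relation cannot be decided statically, a DeducedFilterCombination of conditions that still
// need to be verified is returned instead.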
/**
|
||||
* 从filter中判断是否有确定的id对象,如果有则返回这些id,没有返回空数组
|
||||
* @param filter
|
||||
|
|
@ -84,3 +130,4 @@ export declare function checkFilterRepel<ED extends EntityDict & BaseEntityDict,
|
|||
* @param filter
|
||||
*/
|
||||
export declare function translateFilterToObjectPredicate(filter: Record<string, any>): {};
|
||||
export {};
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.translateFilterToObjectPredicate = exports.checkFilterRepel = exports.checkFilterContains = exports.makeTreeDescendantFilter = exports.makeTreeAncestorFilter = exports.same = exports.getRelevantIds = exports.judgeValueRelation = exports.combineFilters = exports.translateCreateDataToFilter = void 0;
|
||||
exports.translateFilterToObjectPredicate = exports.checkFilterRepel = exports.checkFilterContains = exports.makeTreeDescendantFilter = exports.makeTreeAncestorFilter = exports.same = exports.getRelevantIds = exports.contains = exports.analyzeFilterRelation = exports.judgeValueRelation = exports.combineFilters = exports.translateCreateDataToFilter = void 0;
|
||||
const tslib_1 = require("tslib");
|
||||
const assert_1 = tslib_1.__importDefault(require("assert"));
|
||||
const types_1 = require("../types");
|
||||
|
|
@ -43,9 +43,24 @@ function addFilterSegment(entity, schema, ...filters) {
|
|||
if (!filter[attr]) {
|
||||
filter[attr] = value;
|
||||
}
|
||||
// 只优化一种情况,就是两个都等值且相等
|
||||
// 优化两个都等值且相等
|
||||
else if (filter[attr] === value) {
|
||||
}
|
||||
// value定义的查询被当前查询包含
|
||||
else if (contains(entity, schema, {
|
||||
[attr]: value,
|
||||
}, {
|
||||
[attr]: filter[attr],
|
||||
}) === true) {
|
||||
filter[attr] = value;
|
||||
}
|
||||
// 当前查询被value所定义的查询包含
|
||||
else if (contains(entity, schema, {
|
||||
[attr]: filter[attr],
|
||||
}, {
|
||||
[attr]: value
|
||||
}) == true) {
|
||||
}
|
||||
else {
|
||||
addIntoAnd({
|
||||
[attr]: value,
|
||||
|
|
@ -1041,15 +1056,20 @@ function judgeFilterSingleAttrRelation(entity, schema, attr, filter, compared, c
|
|||
// 到这里说明无法直接判断此attr上的相容或者相斥,也无法把判定推断到更深层的算子之上
|
||||
return;
|
||||
}
|
||||
/** 判断filter条件对compared条件是否相容或相斥
|
||||
/**
|
||||
* 根据filter对compared查询的各个条件进行逐项分析
|
||||
* @param entity
|
||||
* @param schema
|
||||
* @param filter
|
||||
* @param compared
|
||||
* @param contained: true代表判定filter包容compared(filter的查询结果是compared查询结果的子集), false代表判定filter与compared相斥(filter的查询结果与compared没有交集)
|
||||
* @returns 返回true说明肯定相容(相斥),返回false说明无法判定相容(相斥),返回DeducedFilterCombination说明需要进一步判断此推断的条件
|
||||
* @param contained
|
||||
* @returns
|
||||
* sureAttributes中包含被判定肯定相容或肯定不相斥的属性(不用再继续判定了)
|
||||
* uncertainAttributes中包含的是无法判定结果的属性
|
||||
* totalAndDeducedFilters包含的是判定过程中推论的相容的充分条件(and关系)
|
||||
* totalOrDeducedFilters包含的是判定过程中推论的相斥的充分条件(or关系)
|
||||
*/
|
||||
function judgeFilterRelation(entity, schema, filter, compared, contained) {
|
||||
function analyzeFilterRelation(entity, schema, filter, compared, contained) {
|
||||
const totalAndDeducedFilters = [];
|
||||
const totalOrDeducedFilters = [];
|
||||
const uncertainAttributes = [];
|
||||
|
|
@ -1239,6 +1259,28 @@ function judgeFilterRelation(entity, schema, filter, compared, contained) {
|
|||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
totalAndDeducedFilters,
|
||||
totalOrDeducedFilters,
|
||||
uncertainAttributes,
|
||||
sureAttributes,
|
||||
};
|
||||
}
|
||||
exports.analyzeFilterRelation = analyzeFilterRelation;
|
||||
/** 判断filter条件对compared条件是否相容或相斥
|
||||
* @param entity
|
||||
* @param schema
|
||||
* @param filter
|
||||
* @param compared
|
||||
* @param contained: true代表判定filter包容compared(filter的查询结果是compared查询结果的子集), false代表判定filter与compared相斥(filter的查询结果与compared没有交集)
|
||||
* @returns 返回true说明肯定相容(相斥),返回false说明无法判定相容(相斥),返回DeducedFilterCombination说明需要进一步判断此推断的条件
|
||||
*/
|
||||
function judgeFilterRelation(entity, schema, filter, compared, contained) {
|
||||
const result = analyzeFilterRelation(entity, schema, filter, compared, contained);
|
||||
if (typeof result === 'boolean') {
|
||||
return result;
|
||||
}
|
||||
const { sureAttributes, uncertainAttributes, totalAndDeducedFilters, totalOrDeducedFilters, } = result;
|
||||
if (contained) {
|
||||
if (sureAttributes.length === Object.keys(compared).length) {
|
||||
return true;
|
||||
|
|
@ -1296,6 +1338,7 @@ function contains(entity, schema, filter, contained) {
|
|||
return judgeFilterRelation(entity, schema, filter, contained, true);
|
||||
// return false;
|
||||
}
|
||||
exports.contains = contains;
|
||||
/**
|
||||
* 判断filter1和filter2是否相斥,即filter1和filter2查询的结果一定没有交集
|
||||
* filter1 = {
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ export type ActionDef<A extends Action, S extends State> = {
|
|||
};
|
||||
is?: S;
|
||||
};
|
||||
export type ActionDictOfEntityDict<E extends EntityDict> = {
|
||||
export type ActionDefDict<E extends EntityDict> = {
|
||||
[T in keyof E]?: {
|
||||
[A in keyof E[T]['OpSchema']]?: ActionDef<string, string>;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,25 +1,33 @@
|
|||
import { AuthDeduceRelationMap, EntityDict } from './Entity';
|
||||
import { EntityDict as BaseEntityDict } from "../base-app-domain";
|
||||
import { AsyncContext } from '../store/AsyncRowStore';
|
||||
import { SyncConfig } from "./Sync";
|
||||
import { AttrUpdateMatrix } from './EntityDesc';
|
||||
import { ActionDefDict } from './Action';
|
||||
import { StyleDict } from './Style';
|
||||
/**
|
||||
* 后台环境配置
|
||||
* 后台配置
|
||||
*/
|
||||
export type ServerConfiguration = {
|
||||
export type ServerConfiguration<ED extends BaseEntityDict & EntityDict, Cxt extends AsyncContext<ED>> = {
|
||||
database: {
|
||||
type: 'mysql';
|
||||
host: string;
|
||||
database: string;
|
||||
port: number;
|
||||
port?: number;
|
||||
user: string;
|
||||
password?: string;
|
||||
connectionLimit: number;
|
||||
charset: "utf8mb4_general_ci";
|
||||
};
|
||||
http: {
|
||||
port: number;
|
||||
workDir: {
|
||||
path: string;
|
||||
};
|
||||
sync?: SyncConfig<ED, Cxt>;
|
||||
};
|
||||
/**
|
||||
* 前后台共用的配置
|
||||
* 前后台访问配置
|
||||
*/
|
||||
export type ProjectConfiguration = {
|
||||
export type AccessConfiguration = {
|
||||
routerPrefixes?: {
|
||||
aspect?: string;
|
||||
endpoint?: string;
|
||||
|
|
@ -27,6 +35,33 @@ export type ProjectConfiguration = {
|
|||
getSubscribePoint?: string;
|
||||
bridge?: string;
|
||||
};
|
||||
http: {
|
||||
hostname: string;
|
||||
port?: number;
|
||||
ssl?: boolean;
|
||||
path?: string;
|
||||
};
|
||||
};
|
||||
/**
|
||||
* 业务逻辑的通用配置
|
||||
*/
|
||||
export type CommonConfiguration<ED extends BaseEntityDict & EntityDict> = {
|
||||
attrUpdateMatrix: AttrUpdateMatrix<ED>;
|
||||
actionDefDict: ActionDefDict<ED>;
|
||||
authDeduceRelationMap: AuthDeduceRelationMap<ED>;
|
||||
selectFreeEntities?: (keyof ED)[];
|
||||
updateFreeDict?: {
|
||||
[A in keyof ED]?: string[];
|
||||
};
|
||||
cacheSavedEntities?: (keyof ED)[];
|
||||
cacheKeepFreshPeriod?: number;
|
||||
};
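// A hedged sketch (not part of the diff) of a CommonConfiguration value; the entity names and
// attribute lists are hypothetical, and actionDefDict is assumed to come from the project's
// generated oak-app-domain.
export const common: CommonConfiguration<EntityDict> = {
    attrUpdateMatrix: {},
    actionDefDict,
    authDeduceRelationMap: {},
    selectFreeEntities: ['book'],
    updateFreeDict: { order: ['remark'] },
    cacheSavedEntities: ['book', 'order'],
    cacheKeepFreshPeriod: 600 * 1000,
};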
export type DependencyConfiguration = string[];
|
||||
/**
|
||||
* 渲染相关定义
|
||||
*/
|
||||
export type RenderConfiguration<ED extends BaseEntityDict & EntityDict> = {
|
||||
styleDict: StyleDict<ED>;
|
||||
};
|
||||
/**
|
||||
* 编译环境配置
|
||||
|
|
|
|||
|
|
@ -1,3 +1,2 @@
|
|||
"use strict";
|
||||
// 将项目的所有配置规范化到一起(未完成)by Xc 20240207
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
|
|
|
|||
|
|
@ -36,4 +36,7 @@ export interface Connector<ED extends EntityDict, FrontCxt extends SyncContext<E
|
|||
url: string;
|
||||
headers?: Record<string, string>;
|
||||
};
|
||||
getFullData: (keys?: (keyof ED)[]) => Promise<{
|
||||
[T in keyof ED]?: ED[T]['OpSchema'][];
|
||||
}>;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,12 +1,19 @@
|
|||
import { LocaleDef } from './Locale';
|
||||
import { Index } from './Storage';
|
||||
import { EntityShape, Configuration, EntityDict } from './Entity';
|
||||
export type EntityDesc<Schema extends EntityShape, Action extends string = '', Relation extends string = '', V extends Record<string, string> = {}> = {
|
||||
locales: LocaleDef<Schema, Action, Relation, V>;
|
||||
import { StyleDesc } from './Style';
|
||||
export type EntityDesc<Schema extends EntityShape, Action extends string = '', Relation extends string = '', V extends Record<string, string> = {
|
||||
['##oak_illegal##']: '';
|
||||
}> = {
|
||||
locales: LocaleDef<Schema, Action, Relation, keyof V extends '##oak_illegal##' ? {} : V>;
|
||||
indexes?: Index<Schema>[];
|
||||
configuration?: Configuration;
|
||||
recursiveDepth?: number;
|
||||
};
|
||||
} & (Action extends '' ? (keyof V extends '##oak_illegal##' ? {} : {
|
||||
style: StyleDesc<Action, V>;
|
||||
}) : {
|
||||
style: StyleDesc<Action, V>;
|
||||
});
|
||||
export type AttrUpdateMatrix<ED extends EntityDict> = {
|
||||
[T in keyof ED]?: {
|
||||
[A in keyof ED[T]['Update']['data']]?: {
|
||||
|
|
|
|||
|
|
@ -1,11 +1,40 @@
|
|||
import { EntityDict } from './Entity';
|
||||
import { EntityDict, GeneralEntityShape } from './Entity';
|
||||
import { EntityDict as BaseEntityDict } from '../base-app-domain';
|
||||
type ThemeColor = 'default' | 'success' | 'warning' | 'error' | 'primary' | 'danger';
|
||||
export type ColorDict<ED extends BaseEntityDict & EntityDict> = {
|
||||
[T in keyof ED]?: {
|
||||
[A in keyof ED[T]['OpSchema']]?: {
|
||||
[E in ED[T]['OpSchema'][A]]?: ThemeColor | `#${string}`;
|
||||
type Color = `#${string}`;
|
||||
type IconName = string;
|
||||
export type StyleDesc<Action extends string = '', V extends Record<string, string> = {
|
||||
['##oak_illegal##']: '';
|
||||
}> = Action extends '' ? (keyof V extends '##oak_illegal##' ? {} : {
|
||||
color: {
|
||||
[A in keyof V]: {
|
||||
[E in V[A]]: Color;
|
||||
};
|
||||
};
|
||||
}) : (keyof V extends '##oak_illegal##' ? {
|
||||
icon: {
|
||||
[A in Action]?: IconName;
|
||||
};
|
||||
} : {
|
||||
icon: {
|
||||
[A in Action]?: IconName;
|
||||
};
|
||||
color: {
|
||||
[A in keyof V]: {
|
||||
[E in V[A]]: Color;
|
||||
};
|
||||
};
|
||||
});
|
||||
export type StyleDef<ED extends GeneralEntityShape, Action extends string> = {
|
||||
color?: {
|
||||
[A in keyof ED]?: {
|
||||
[E in ED[A]]?: Color;
|
||||
};
|
||||
};
|
||||
icon?: {
|
||||
[A in Action]?: IconName;
|
||||
};
|
||||
};
|
||||
export type StyleDict<ED extends BaseEntityDict & EntityDict> = {
|
||||
[T in keyof ED]?: StyleDef<ED[T]['OpSchema'], ED[T]['Action']>;
|
||||
};
|
||||
export {};
|
||||
|
|
|
|||
|
|
@ -2,24 +2,20 @@
|
|||
import { IncomingHttpHeaders } from "http";
|
||||
import { SyncContext } from '../store/SyncRowStore';
|
||||
import { Connector, EntityDict, OakException, OpRecord } from "../types";
|
||||
type ServerOption = {
|
||||
protocol: string;
|
||||
hostname: string;
|
||||
port?: number;
|
||||
apiPath?: string;
|
||||
};
|
||||
export declare class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext<ED>> implements Connector<ED, FrontCxt> {
|
||||
import { AccessConfiguration } from '../types/Configuration';
|
||||
export default class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext<ED>> implements Connector<ED, FrontCxt> {
|
||||
static ASPECT_ROUTER: string;
|
||||
static BRIDGE_ROUTER: string;
|
||||
static SUBSCRIBE_ROUTER: string;
|
||||
static SUBSCRIBE_POINT_ROUTER: string;
|
||||
static ENDPOINT_ROUTER: string;
|
||||
private serverUrl;
|
||||
private serverAspectUrl;
|
||||
private serverBridgeUrl;
|
||||
private serverSubscribePointUrl;
|
||||
private option;
|
||||
private configuration;
|
||||
private makeException;
|
||||
constructor(option: ServerOption, makeException: (exceptionData: any) => OakException<ED>);
|
||||
constructor(configuration: AccessConfiguration, makeException: (exceptionData: any) => OakException<ED>);
|
||||
protected makeHeadersAndBody(name: string, data: any, context?: FrontCxt): Promise<{
|
||||
headers: Record<string, string>;
|
||||
body: FormData;
|
||||
|
|
@ -77,5 +73,5 @@ export declare class SimpleConnector<ED extends EntityDict, FrontCxt extends Syn
|
|||
url: string;
|
||||
headers?: Record<string, string> | undefined;
|
||||
};
|
||||
getFullData(): Promise<{}>;
|
||||
}
|
||||
export {};
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SimpleConnector = void 0;
|
||||
const tslib_1 = require("tslib");
|
||||
const assert_1 = tslib_1.__importDefault(require("assert"));
|
||||
const stream_1 = require("stream");
|
||||
|
|
@ -12,25 +11,34 @@ class SimpleConnector {
|
|||
static SUBSCRIBE_ROUTER = process.env.OAK_SUBSCRIBE_ROUTER || '/subscribe';
|
||||
static SUBSCRIBE_POINT_ROUTER = '/subscribePoint';
|
||||
static ENDPOINT_ROUTER = '/endpoint';
|
||||
serverUrl;
|
||||
serverAspectUrl;
|
||||
serverBridgeUrl;
|
||||
serverSubscribePointUrl;
|
||||
option;
|
||||
configuration;
|
||||
makeException;
|
||||
constructor(option, makeException) {
|
||||
this.option = option;
|
||||
const { protocol, hostname, port, apiPath } = option;
|
||||
constructor(configuration, makeException) {
|
||||
this.configuration = configuration;
|
||||
const { routerPrefixes, http } = configuration;
|
||||
const { ssl, hostname, port, path } = http;
|
||||
const protocol = ssl ? 'https:' : 'http';
|
||||
let serverUrl = `${protocol}//${hostname}`;
|
||||
this.serverUrl = serverUrl;
|
||||
if (typeof port === 'number') {
|
||||
serverUrl += `:${port}`;
|
||||
}
|
||||
if (apiPath) {
|
||||
(0, assert_1.default)(apiPath.startsWith('/'), 'apiPath前缀必须存在/');
|
||||
serverUrl += apiPath;
|
||||
if (path) {
|
||||
if (path.startsWith('/')) {
|
||||
serverUrl += path;
|
||||
}
|
||||
else {
|
||||
serverUrl += `/${path}`;
|
||||
}
|
||||
}
|
||||
this.serverAspectUrl = `${serverUrl}${SimpleConnector.ASPECT_ROUTER}`;
|
||||
this.serverBridgeUrl = `${serverUrl}${SimpleConnector.BRIDGE_ROUTER}`;
|
||||
this.serverSubscribePointUrl = `${serverUrl}${SimpleConnector.SUBSCRIBE_POINT_ROUTER}`;
|
||||
this.serverAspectUrl = `${serverUrl}${routerPrefixes?.aspect || SimpleConnector.ASPECT_ROUTER}`;
|
||||
this.serverBridgeUrl = `${serverUrl}${routerPrefixes?.bridge || SimpleConnector.BRIDGE_ROUTER}`;
|
||||
this.serverSubscribePointUrl = `${serverUrl}${routerPrefixes?.getSubscribePoint ||
|
||||
SimpleConnector.SUBSCRIBE_POINT_ROUTER}`;
|
||||
this.makeException = makeException;
|
||||
}
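// A hedged usage sketch (not part of the diff): the connector is now built from an
// AccessConfiguration instead of the old ServerOption. Hostname, path and prefixes are
// hypothetical, and projectMakeException stands in for the app's exception factory.
const connector = new SimpleConnector(
    {
        http: { hostname: 'api.example.com', ssl: true, path: '/oak-api' },
        routerPrefixes: { aspect: '/aspect', endpoint: '/endpoint' },
    },
    (exceptionData) => projectMakeException(exceptionData)
);
// getRouter()/getEndpointRouter() now honour routerPrefixes and only fall back to the static
// SimpleConnector defaults when no prefix is configured.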
async makeHeadersAndBody(name, data, context) {
|
||||
|
|
@ -105,13 +113,13 @@ class SimpleConnector {
|
|||
return this.parseAspectResult(response);
|
||||
}
|
||||
getRouter() {
|
||||
return SimpleConnector.ASPECT_ROUTER;
|
||||
return this.configuration.routerPrefixes?.aspect || SimpleConnector.ASPECT_ROUTER;
|
||||
}
|
||||
getSubscribeRouter() {
|
||||
return SimpleConnector.SUBSCRIBE_ROUTER;
|
||||
return this.configuration.routerPrefixes?.subscribe || SimpleConnector.SUBSCRIBE_ROUTER;
|
||||
}
|
||||
getSubscribePointRouter() {
|
||||
return SimpleConnector.SUBSCRIBE_POINT_ROUTER;
|
||||
return this.configuration.routerPrefixes?.getSubscribePoint || SimpleConnector.SUBSCRIBE_POINT_ROUTER;
|
||||
}
|
||||
async getSubscribePoint() {
|
||||
let response;
|
||||
|
|
@ -130,7 +138,7 @@ class SimpleConnector {
|
|||
response.headers.get('content-type');
|
||||
if (responseType?.toLocaleLowerCase().match(/application\/json/i)) {
|
||||
const { url, path, port, namespace } = await response.json();
|
||||
let url2 = url || `${this.option.protocol}//${this.option.hostname}`;
|
||||
let url2 = url || this.serverUrl;
|
||||
(0, assert_1.default)(port);
|
||||
url2 += `:${port}`;
|
||||
if (namespace) {
|
||||
|
|
@ -146,7 +154,7 @@ class SimpleConnector {
|
|||
}
|
||||
}
|
||||
getEndpointRouter() {
|
||||
return SimpleConnector.ENDPOINT_ROUTER;
|
||||
return this.configuration.routerPrefixes?.endpoint || SimpleConnector.ENDPOINT_ROUTER;
|
||||
}
|
||||
parseRequest(headers, body, files) {
|
||||
const { 'oak-cxt': oakCxtStr, 'oak-aspect': aspectName } = headers;
|
||||
|
|
@ -213,5 +221,9 @@ class SimpleConnector {
|
|||
headers: headers && JSON.parse(headers),
|
||||
};
|
||||
}
|
||||
async getFullData() {
|
||||
console.error('前后台模式下暂时不支持此操作,请到数据库查看数据');
|
||||
return {};
|
||||
}
|
||||
}
|
||||
exports.SimpleConnector = SimpleConnector;
|
||||
exports.default = SimpleConnector;
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
/// <reference types="node" />
|
||||
/**
|
||||
* 防止assert打包体积过大,从这里引用
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import intersection from 'lodash/intersection';
|
|||
import intersectionBy from 'lodash/intersectionBy';
|
||||
import omit from 'lodash/omit';
|
||||
import merge from 'lodash/merge';
|
||||
import mergeWith from 'lodash/mergeWith';
|
||||
import cloneDeep from 'lodash/cloneDeep';
|
||||
import pick from 'lodash/pick';
|
||||
import isEqual from 'lodash/isEqual';
|
||||
|
|
@ -21,4 +22,12 @@ import differenceBy from 'lodash/differenceBy';
|
|||
import groupBy from 'lodash/groupBy';
|
||||
import unionBy from 'lodash/unionBy';
|
||||
import pullAll from 'lodash/pullAll';
|
||||
export { unset, pull, uniq, uniqBy, get, set, intersection, intersectionBy, omit, merge, cloneDeep, pick, isEqual, union, difference, differenceBy, groupBy, unionBy, pullAll, };
|
||||
/**
|
||||
* merge两个对象,遇到array时使用连接合并
|
||||
* @param object
|
||||
* @param source
|
||||
* @returns
|
||||
*/
|
||||
declare function mergeConcatArray(object: any, source: any): any;
|
||||
declare function mergeConcatMany<T>(array: Array<T>): T;
|
||||
export { unset, pull, uniq, uniqBy, get, set, intersection, intersectionBy, omit, merge, mergeWith, mergeConcatArray, mergeConcatMany, cloneDeep, pick, isEqual, union, difference, differenceBy, groupBy, unionBy, pullAll, };
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.pullAll = exports.unionBy = exports.groupBy = exports.differenceBy = exports.difference = exports.union = exports.isEqual = exports.pick = exports.cloneDeep = exports.merge = exports.omit = exports.intersectionBy = exports.intersection = exports.set = exports.get = exports.uniqBy = exports.uniq = exports.pull = exports.unset = void 0;
|
||||
exports.pullAll = exports.unionBy = exports.groupBy = exports.differenceBy = exports.difference = exports.union = exports.isEqual = exports.pick = exports.cloneDeep = exports.mergeConcatMany = exports.mergeConcatArray = exports.mergeWith = exports.merge = exports.omit = exports.intersectionBy = exports.intersection = exports.set = exports.get = exports.uniqBy = exports.uniq = exports.pull = exports.unset = void 0;
|
||||
const tslib_1 = require("tslib");
|
||||
/**
|
||||
* 避免lodash打包体积过大
|
||||
|
|
@ -26,6 +26,8 @@ const omit_1 = tslib_1.__importDefault(require("lodash/omit"));
|
|||
exports.omit = omit_1.default;
|
||||
const merge_1 = tslib_1.__importDefault(require("lodash/merge"));
|
||||
exports.merge = merge_1.default;
|
||||
const mergeWith_1 = tslib_1.__importDefault(require("lodash/mergeWith"));
|
||||
exports.mergeWith = mergeWith_1.default;
|
||||
const cloneDeep_1 = tslib_1.__importDefault(require("lodash/cloneDeep"));
|
||||
exports.cloneDeep = cloneDeep_1.default;
|
||||
const pick_1 = tslib_1.__importDefault(require("lodash/pick"));
|
||||
|
|
@ -44,3 +46,27 @@ const unionBy_1 = tslib_1.__importDefault(require("lodash/unionBy"));
|
|||
exports.unionBy = unionBy_1.default;
|
||||
const pullAll_1 = tslib_1.__importDefault(require("lodash/pullAll"));
|
||||
exports.pullAll = pullAll_1.default;
|
||||
const assert_1 = tslib_1.__importDefault(require("assert"));
|
||||
/**
|
||||
* merge两个对象,遇到array时使用连接合并
|
||||
* @param object
|
||||
* @param source
|
||||
* @returns
|
||||
*/
|
||||
function mergeConcatArray(object, source) {
|
||||
if (object instanceof Array) {
|
||||
(0, assert_1.default)(source instanceof Array, '合并的对象必须结构一致');
|
||||
return (0, uniq_1.default)(object.concat(source));
|
||||
}
|
||||
return (0, mergeWith_1.default)(object, source, (objValue, srcValue) => {
|
||||
if (objValue instanceof Array) {
|
||||
(0, assert_1.default)(srcValue instanceof Array, '合并的对象必须结构一致');
|
||||
return (0, uniq_1.default)(objValue.concat(srcValue));
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.mergeConcatArray = mergeConcatArray;
|
||||
function mergeConcatMany(array) {
|
||||
return array.reduce((prev, current) => mergeConcatArray(prev, current));
|
||||
}
|
||||
exports.mergeConcatMany = mergeConcatMany;
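// A hedged example (not part of the diff) of the array behaviour implemented above: arrays are
// concatenated and de-duplicated instead of being merged index by index.
const a = { tags: ['x', 'y'], meta: { level: 1 } };
const b = { tags: ['y', 'z'], meta: { owner: 'u1' } };
mergeConcatArray(a, b);
// => { tags: ['x', 'y', 'z'], meta: { level: 1, owner: 'u1' } }
mergeConcatMany([{ ids: [1] }, { ids: [2] }, { ids: [1, 3] }]);
// => { ids: [1, 2, 3] }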
@ -0,0 +1,15 @@
|
|||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
import { Checker } from '../../types/Auth';
|
||||
import { CommonConfiguration } from '../../types/Configuration';
|
||||
/**
|
||||
* 合并引入模块中的checker和common
|
||||
* @param modules
|
||||
* @returns
|
||||
*/
|
||||
export default function combineBaseModules<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED> | SyncContext<ED>>(...modules: string[]): {
|
||||
checkers: Array<Checker<ED, keyof ED, Cxt>>;
|
||||
common: CommonConfiguration<ED>;
|
||||
};
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const tslib_1 = require("tslib");
|
||||
const lodash_1 = require("../../utils/lodash");
|
||||
const assert_1 = tslib_1.__importDefault(require("assert"));
|
||||
/**
|
||||
* 合并引入模块中的checker和common
|
||||
* @param modules
|
||||
* @returns
|
||||
*/
|
||||
function combineBaseModules(...modules) {
|
||||
// 合并模块中的checker/common
|
||||
return modules.map((module) => {
|
||||
const checkers = require(`${module}/lib/checkers`).default;
|
||||
const common = require(`${module}/lib/configuration`).default;
|
||||
(0, assert_1.default)(checkers instanceof Array, `${module}模块中的checkers不是数组`);
|
||||
(0, assert_1.default)(typeof common === 'object', `${module}模块中的common配置不是对象`);
|
||||
return {
|
||||
checkers,
|
||||
common,
|
||||
};
|
||||
}).reduce((prev, current) => ({
|
||||
checkers: (0, lodash_1.mergeConcatArray)(prev.checkers, current.checkers),
|
||||
common: (0, lodash_1.mergeConcatArray)(prev.common, current.common),
|
||||
}));
|
||||
}
|
||||
exports.default = combineBaseModules;
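// A hedged usage sketch (not part of the diff): each listed module is expected to default-export
// an array from lib/checkers and an object from lib/configuration; the module names are
// hypothetical.
const { checkers, common } = combineBaseModules('oak-module-a', 'oak-module-b');
// checkers is the concatenated, de-duplicated checker array of both modules; common is their
// CommonConfiguration merged with mergeConcatArray.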
@ -0,0 +1 @@
|
|||
export * from './combine.dev';
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
import { Aspect, Exportation, Importation, Routine, Timer, Trigger, Watcher } from '../../types';
|
||||
export default function combineModuleDev<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(...modules: string[]): {
|
||||
aspectDict: Record<string, Aspect<ED, Cxt>>;
|
||||
data: { [T in keyof ED]?: ED[T]["OpSchema"][] | undefined; };
|
||||
importations: Importation<ED, keyof ED, string, Cxt>;
|
||||
exportations: Exportation<ED, keyof ED, string, Cxt>;
|
||||
watchers: Watcher<ED, keyof ED, Cxt>[];
|
||||
timers: Timer<ED, keyof ED, Cxt>[];
|
||||
startRoutines: Routine<ED, keyof ED, Cxt>[];
|
||||
triggers: Trigger<ED, keyof ED, Cxt>[];
|
||||
checkers: import("../../types").Checker<ED, keyof ED, never>[];
|
||||
common: import("../../types/Configuration").CommonConfiguration<ED>;
|
||||
};
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const tslib_1 = require("tslib");
|
||||
const combine_common_1 = tslib_1.__importDefault(require("./combine.common"));
|
||||
const lodash_1 = require("../../utils/lodash");
|
||||
const assert_1 = tslib_1.__importDefault(require("assert"));
|
||||
function combineModuleDev(...modules) {
|
||||
const { checkers, common } = (0, combine_common_1.default)(...modules);
|
||||
const others = modules.map((module) => ({
|
||||
triggers: require(`${module}/lib/triggers`).default,
|
||||
aspectDict: require(`${module}/lib/aspects`).default,
|
||||
watchers: require(`${module}/lib/watchers`).default,
|
||||
timers: require(`${module}/lib/timers`).default,
|
||||
startRoutines: require(`${module}/lib/routines/start`).default,
|
||||
importations: require(`${module}/lib/ports`).importations,
|
||||
exportations: require(`${module}/lib/ports`).exportations,
|
||||
data: require(`${module}/lib/data`).default,
|
||||
})).reduce((prev, current, index) => {
|
||||
const check = (module, name) => {
|
||||
(0, assert_1.default)(typeof module.aspectDict === 'object', `${name}模块中的aspectDict不是对象`);
|
||||
(0, assert_1.default)(typeof module.data === 'object', `${name}模块中的data不是对象`);
|
||||
(0, assert_1.default)(module.exportations instanceof Array, `${name}模块中的exportations不是数组`);
|
||||
(0, assert_1.default)(module.importations instanceof Array, `${name}模块中的importations不是数组`);
|
||||
(0, assert_1.default)(module.watchers instanceof Array, `${name}模块中的watchers不是数组`);
|
||||
(0, assert_1.default)(module.timers instanceof Array, `${name}模块中的timers不是数组`);
|
||||
(0, assert_1.default)(module.triggers instanceof Array, `${name}模块中的triggers不是数组`);
|
||||
(0, assert_1.default)(module.startRoutines instanceof Array, `${name}模块中的startRoutines不是数组`);
|
||||
};
|
||||
if (index === 1) {
|
||||
check(prev, modules[0]);
|
||||
}
|
||||
check(current, modules[index]);
|
||||
// aspectDict中不应当有同名对象
|
||||
const its = (0, lodash_1.intersection)(Object.keys(prev.aspectDict), Object.keys(current.aspectDict));
|
||||
if (its.length > 0) {
|
||||
throw new Error(`模块${modules[index]}的aspectDict中,存在和其它模块同步的aspect【${its.join(',')}】,请正确处理`);
|
||||
}
|
||||
return {
|
||||
aspectDict: (0, lodash_1.mergeConcatArray)(prev.aspectDict, current.aspectDict),
|
||||
data: (0, lodash_1.mergeConcatArray)(prev.data, current.data),
|
||||
importations: (0, lodash_1.mergeConcatArray)(prev.importations, current.importations),
|
||||
exportations: (0, lodash_1.mergeConcatArray)(prev.exportations, current.exportations),
|
||||
watchers: (0, lodash_1.mergeConcatArray)(prev.watchers, current.watchers),
|
||||
timers: (0, lodash_1.merge)(prev.timers, current.timers),
|
||||
startRoutines: (0, lodash_1.merge)(prev.startRoutines, current.startRoutines),
|
||||
triggers: (0, lodash_1.merge)(prev.triggers, current.triggers),
|
||||
};
|
||||
});
|
||||
return {
|
||||
checkers,
|
||||
common,
|
||||
...others,
|
||||
};
|
||||
}
|
||||
exports.default = combineModuleDev;
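// A hedged usage sketch (not part of the diff): the dev/server combination additionally collects
// each module's aspects, triggers, watchers, timers, start routines, ports and initial data.
// Module names are hypothetical.
const combined = combineModuleDev('oak-module-a', 'oak-module-b');
// Combination throws if two modules define an aspect with the same name; array-like members are
// merged across modules with mergeConcatArray/merge.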
@ -0,0 +1,4 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const tslib_1 = require("tslib");
|
||||
tslib_1.__exportStar(require("./combine.dev"), exports);
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
export default function combineModuleDev<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(...modules: string[]): {
|
||||
checkers: import("../../types").Checker<ED, keyof ED, never>[];
|
||||
common: import("../../types/Configuration").CommonConfiguration<ED>;
|
||||
};
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const tslib_1 = require("tslib");
|
||||
const combine_common_1 = tslib_1.__importDefault(require("./combine.common"));
|
||||
function combineModuleDev(...modules) {
|
||||
return (0, combine_common_1.default)(...modules);
|
||||
}
|
||||
exports.default = combineModuleDev;
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
import { Aspect, Exportation, Importation, Routine, Timer, Trigger, Watcher } from '../../types';
|
||||
export default function combineModuleServer<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(...modules: string[]): {
|
||||
aspectDict: Record<string, Aspect<ED, Cxt>>;
|
||||
data: { [T in keyof ED]?: ED[T]["OpSchema"][] | undefined; };
|
||||
importations: Importation<ED, keyof ED, string, Cxt>;
|
||||
exportations: Exportation<ED, keyof ED, string, Cxt>;
|
||||
watchers: Watcher<ED, keyof ED, Cxt>[];
|
||||
timers: Timer<ED, keyof ED, Cxt>[];
|
||||
startRoutines: Routine<ED, keyof ED, Cxt>[];
|
||||
triggers: Trigger<ED, keyof ED, Cxt>[];
|
||||
checkers: import("../../types").Checker<ED, keyof ED, never>[];
|
||||
common: import("../../types/Configuration").CommonConfiguration<ED>;
|
||||
};
|
||||
|
|
@ -0,0 +1,55 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const tslib_1 = require("tslib");
const combine_common_1 = tslib_1.__importDefault(require("./combine.common"));
const lodash_1 = require("../../utils/lodash");
const assert_1 = tslib_1.__importDefault(require("assert"));
function combineModuleServer(...modules) {
    const { checkers, common } = (0, combine_common_1.default)(...modules);
    const others = modules.map((module) => ({
        triggers: require(`${module}/lib/triggers`).default,
        aspectDict: require(`${module}/lib/aspects`).default,
        watchers: require(`${module}/lib/watchers`).default,
        timers: require(`${module}/lib/timers`).default,
        startRoutines: require(`${module}/lib/routines/start`).default,
        importations: require(`${module}/lib/ports`).importations,
        exportations: require(`${module}/lib/ports`).exportations,
        data: require(`${module}/lib/data`).default,
    })).reduce((prev, current, index) => {
        const check = (module, name) => {
            (0, assert_1.default)(typeof module.aspectDict === 'object', `${name}模块中的aspectDict不是对象`);
            (0, assert_1.default)(typeof module.data === 'object', `${name}模块中的data不是对象`);
            (0, assert_1.default)(module.exportations instanceof Array, `${name}模块中的exportations不是数组`);
            (0, assert_1.default)(module.importations instanceof Array, `${name}模块中的importations不是数组`);
            (0, assert_1.default)(module.watchers instanceof Array, `${name}模块中的watchers不是数组`);
            (0, assert_1.default)(module.timers instanceof Array, `${name}模块中的timers不是数组`);
            (0, assert_1.default)(module.triggers instanceof Array, `${name}模块中的triggers不是数组`);
            (0, assert_1.default)(module.startRoutines instanceof Array, `${name}模块中的startRoutines不是数组`);
        };
        if (index === 1) {
            check(prev, modules[0]);
        }
        check(current, modules[index]);
        // aspectDict must not contain aspects with the same name across modules
        const its = (0, lodash_1.intersection)(Object.keys(prev.aspectDict), Object.keys(current.aspectDict));
        if (its.length > 0) {
            throw new Error(`模块${modules[index]}的aspectDict中,存在和其它模块同步的aspect【${its.join(',')}】,请正确处理`);
        }
        return {
            aspectDict: (0, lodash_1.mergeConcatArray)(prev.aspectDict, current.aspectDict),
            data: (0, lodash_1.mergeConcatArray)(prev.data, current.data),
            importations: (0, lodash_1.mergeConcatArray)(prev.importations, current.importations),
            exportations: (0, lodash_1.mergeConcatArray)(prev.exportations, current.exportations),
            watchers: (0, lodash_1.mergeConcatArray)(prev.watchers, current.watchers),
            timers: (0, lodash_1.merge)(prev.timers, current.timers),
            startRoutines: (0, lodash_1.merge)(prev.startRoutines, current.startRoutines),
            triggers: (0, lodash_1.merge)(prev.triggers, current.triggers),
        };
    });
    return {
        checkers,
        common,
        ...others,
    };
}
exports.default = combineModuleServer;
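For orientation, a hedged usage sketch of the compiled combineModuleServer above; the module names, the import path and the type parameters are placeholders for the application's own packages and types, not part of this commit:

import combineModuleServer from 'oak-domain/lib/utils/combine.server'; // import path is a guess
import { EntityDict } from './oak-app-domain';                          // application types (assumed)
import { BackendRuntimeContext, FrontendRuntimeContext } from './context'; // assumed

const combined = combineModuleServer<EntityDict, BackendRuntimeContext, FrontendRuntimeContext>(
    'oak-general-business',
    'my-module'
);
// aspectDict / data / importations / exportations / watchers are merged with mergeConcatArray,
// timers / startRoutines / triggers with merge; duplicate aspect names across modules throw.
// checkers and common come from combineBaseModules (each module's lib/checkers and lib/configuration).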
File diff suppressed because it is too large
@ -34,11 +34,7 @@ export const NUMERICAL_LITERL_DEFAULT_PRECISION = 8;
|
|||
export const NUMERICAL_LITERL_DEFAULT_SCALE = 2;
|
||||
export const INT_LITERL_DEFAULT_WIDTH = 4;
|
||||
|
||||
// temporarily placed here
|
||||
|
||||
// fixed path of the config file for third-party oak libs the project depends on
|
||||
export const OAK_EXTERNAL_LIBS_FILEPATH = (path: string) => {
|
||||
return Path.join(path, 'config/oakExternalLib.json');
|
||||
}
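The helper above only joins the fixed relative location onto the given source directory; for example (posix separators assumed, path purely illustrative):

OAK_EXTERNAL_LIBS_FILEPATH('/home/me/project/src');
// => '/home/me/project/src/config/oakExternalLib.json'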
|
||||
export * from './entities';
|
||||
|
||||
export * from './entities';
|
||||
export const OAK_CLI_MODULE_NAME = '@xuchangzju/oak-cli';
|
||||
|
|
@ -4,8 +4,8 @@ const { factory } = ts;
|
|||
import { join } from 'path';
|
||||
import { Hash, createHash } from 'crypto';
|
||||
import fs from 'fs';
|
||||
import { OAK_EXTERNAL_LIBS_FILEPATH } from './env';
|
||||
import { firstLetterLowerCase, unescapeUnicode } from '../utils/string';
|
||||
import { analyzeDepedency } from './dependencyBuilder';
|
||||
|
||||
/**
|
||||
* Expand an object and compile it into a syntax tree; only string and object key-value pairs are supported
|
||||
|
|
@ -47,9 +47,10 @@ export default class LocaleBuilder {
|
|||
const pwd = process.cwd();
|
||||
this.pwd = pwd;
|
||||
this.asLib = !!asLib;
|
||||
const dependencyFile = OAK_EXTERNAL_LIBS_FILEPATH(join(pwd, 'src'));
|
||||
if (fs.existsSync(dependencyFile)) {
|
||||
this.dependencies = require(dependencyFile);
|
||||
const dependencyConfigureFile = join(pwd, 'src', 'configuration', 'dependency.ts');
|
||||
if (fs.existsSync(dependencyConfigureFile)) {
|
||||
const depGraph = analyzeDepedency(pwd);
|
||||
this.dependencies = depGraph.ascOrder;
|
||||
}
|
||||
else {
|
||||
this.dependencies = [];
|
||||
|
|
@ -218,9 +219,14 @@ export default class LocaleBuilder {
|
|||
|
||||
if (watch) {
|
||||
fs.watch(filepath, () => {
|
||||
const data = this.readLocaleFileContent(filepath);
|
||||
this.locales[ns] = [module, position.replace(/\\/g, '/'), language, data];
|
||||
this.outputDataFile();
|
||||
try {
|
||||
const data = this.readLocaleFileContent(filepath);
|
||||
this.locales[ns] = [module, position.replace(/\\/g, '/'), language, data];
|
||||
this.outputDataFile();
|
||||
}
|
||||
catch(err) {
|
||||
// do nothing
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import { readdirSync, statSync, existsSync, writeFileSync } from 'fs-extra';
|
|||
import assert from 'assert';
|
||||
import * as ts from 'typescript';
|
||||
import NodeWatch from 'node-watch';
|
||||
import { OAK_CLI_MODULE_NAME } from './env';
|
||||
const { factory } = ts;
|
||||
|
||||
/**
|
||||
|
|
@ -37,7 +38,7 @@ const { factory } = ts;
|
|||
 * -------index.json (configuration under this namespace)
 * -------pageMap.json (the compiler injects pageMap here)
 * --router
 * ----index.ts (the compiler injects router.ts here)
 * ----index.ts (the compiler injects allRouters.ts here)
|
||||
*
|
||||
*/
|
||||
|
||||
|
|
@ -351,61 +352,46 @@ function judgeUseOakRouterBuilder(statements: ts.NodeArray<ts.Statement>) {
|
|||
}
|
||||
|
||||
function outputInWebAppDir(appDir: string) {
|
||||
const routerFileName = join(appDir, 'router', 'allRouters.ts');
|
||||
const templateFileName = join(appDir, 'router', 'allRoutersTemplate.ts');
|
||||
if (existsSync(templateFileName)) {
|
||||
const program = ts.createProgram([templateFileName], {
|
||||
removeComments: false,
|
||||
});
|
||||
const routerFile = program.getSourceFile(templateFileName);
|
||||
assert(routerFile);
|
||||
const namespaceDir = join(appDir, 'namespaces');
|
||||
const { statements } = routerFile;
|
||||
if (judgeUseOakRouterBuilder(statements)) {
|
||||
statements.forEach(
|
||||
(statement) => {
|
||||
if (ts.isVariableStatement(statement)) {
|
||||
const declaration = statement.declarationList.declarations.find(
|
||||
declaration => ts.isIdentifier(declaration.name) && declaration.name.text === 'allRouters'
|
||||
);
|
||||
if (declaration) {
|
||||
Object.assign(declaration, {
|
||||
initializer: makeWebAllRouters(namespaceDir, join(appDir, '../../../..'), dirname(templateFileName))
|
||||
});
|
||||
}
|
||||
const routerFileName = join(appDir, 'routers', 'allRouters.ts');
|
||||
const templateFileName = join(appDir, '../../..', 'node_modules', OAK_CLI_MODULE_NAME, 'templateFiles', 'allRouters.ts');
|
||||
const program = ts.createProgram([templateFileName], {
|
||||
removeComments: false,
|
||||
});
|
||||
const routerFile = program.getSourceFile(templateFileName);
|
||||
assert(routerFile);
|
||||
const namespaceDir = join(appDir, 'namespaces');
|
||||
const { statements } = routerFile;
|
||||
if (judgeUseOakRouterBuilder(statements)) {
|
||||
statements.forEach(
|
||||
(statement) => {
|
||||
if (ts.isVariableStatement(statement)) {
|
||||
const declaration = statement.declarationList.declarations.find(
|
||||
declaration => ts.isIdentifier(declaration.name) && declaration.name.text === 'allRouters'
|
||||
);
|
||||
if (declaration) {
|
||||
Object.assign(declaration, {
|
||||
initializer: makeWebAllRouters(namespaceDir, join(appDir, '../../..'), dirname(routerFileName))
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
const printer = ts.createPrinter({ newLine: ts.NewLineKind.LineFeed, removeComments: false });
|
||||
const result = printer.printNode(
|
||||
ts.EmitHint.Unspecified,
|
||||
routerFile,
|
||||
routerFile,
|
||||
);
|
||||
const printer = ts.createPrinter({ newLine: ts.NewLineKind.LineFeed, removeComments: false });
|
||||
const result = printer.printNode(
|
||||
ts.EmitHint.Unspecified,
|
||||
routerFile,
|
||||
routerFile,
|
||||
);
|
||||
|
||||
writeFileSync(routerFileName, result);
|
||||
}
|
||||
}
|
||||
else {
|
||||
console.warn(`${appDir}的目录结构未按照标准建立,缺少了${templateFileName},请从模板中补充`);
|
||||
writeFileSync(routerFileName, result);
|
||||
}
|
||||
}
|
||||
|
||||
function outputInWebDir(dir: string) {
|
||||
const srcAppDir = join(dir, 'src', 'app');
|
||||
const apps = readdirSync(srcAppDir);
|
||||
apps.forEach(
|
||||
(app) => {
|
||||
const appDir = join(srcAppDir, app);
|
||||
const stat = statSync(appDir);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
outputInWebAppDir(appDir);
|
||||
}
|
||||
}
|
||||
)
|
||||
outputInWebAppDir(srcAppDir);
|
||||
}
|
||||
|
||||
function watchDir(projectDir: string, startupDir: string, type: 'native' | 'web' | 'wechatMp') {
|
||||
|
|
@ -415,7 +401,7 @@ function watchDir(projectDir: string, startupDir: string, type: 'native' | 'web'
|
|||
if (startupDir.startsWith('web')) {
|
||||
const srcAppDir = join(projectDir, startupDir, 'src', 'app');
|
||||
const apps = readdirSync(srcAppDir);
|
||||
const tryOutputAppDir = (ns: string) => {
|
||||
const tryOutputAppDir = (ns: string) => {
|
||||
apps.forEach(
|
||||
(app) => {
|
||||
const appDir = join(srcAppDir, app);
|
||||
|
|
@ -427,7 +413,7 @@ function watchDir(projectDir: string, startupDir: string, type: 'native' | 'web'
|
|||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
NodeWatch(srcPageDir, {
|
||||
recursive: true,
|
||||
filter: new RegExp('web\.tsx|web\.pc\.tsx|index\.xml|render\.(native|ios|android)\.tsx'),
|
||||
|
|
|
|||
|
|
@ -98,6 +98,10 @@ const ActionImportStatements = () => [
|
|||
)
|
||||
];
|
||||
|
||||
const StyleAsts: {
|
||||
[module: string]: ts.ObjectLiteralExpression;
|
||||
} = {};
|
||||
|
||||
const ActionAsts: {
|
||||
[module: string]: {
|
||||
statements: Array<ts.Statement>;
|
||||
|
|
@ -118,11 +122,21 @@ const SchemaAsts: {
|
|||
};
|
||||
} = {};
|
||||
|
||||
/**
 * Some projects currently reference the Schema without depending on its package, so duplicates are removed here for now.
 * This behaviour should be corrected later so that inherited schemas are compiled in layers.
 * @param many
 * @param one
 * @param key
 * @param notNull
 */
function addRelationship(many: string, one: string, key: string, notNull: boolean) {
|
||||
const { [many]: manySet } = ManyToOne;
|
||||
const one2 = one === 'Schema' ? many : one;
|
||||
if (manySet) {
|
||||
manySet.push([one2, key, notNull]);
|
||||
if (!manySet.find(ele => ele[1] === key && ele[0] === one2)) {
|
||||
manySet.push([one2, key, notNull]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
assign(ManyToOne, {
|
||||
|
|
@ -132,7 +146,9 @@ function addRelationship(many: string, one: string, key: string, notNull: boolea
|
|||
|
||||
const { [one2]: oneSet } = OneToMany;
|
||||
if (oneSet) {
|
||||
oneSet.push([many, key, notNull]);
|
||||
if (!oneSet.find(ele => ele[1] === key && ele[0] === many)) {
|
||||
oneSet.push([many, key, notNull]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
assign(OneToMany, {
|
||||
|
|
@ -932,7 +948,9 @@ function analyzeSchemaDefinition(
|
|||
// this is also the reverse-pointer definition of a one-to-many relation
|
||||
const reverseEntity = typeName.text;
|
||||
if (ReversePointerRelations[reverseEntity]) {
|
||||
ReversePointerRelations[reverseEntity].push(moduleName);
|
||||
if (!ReversePointerRelations[reverseEntity].includes(moduleName)) {
|
||||
ReversePointerRelations[reverseEntity].push(moduleName);
|
||||
}
|
||||
}
|
||||
else {
|
||||
assign(ReversePointerRelations, {
|
||||
|
|
@ -1091,6 +1109,7 @@ function analyzeEntity(filename: string, path: string, program: ts.Program, rela
|
|||
if (Schema.hasOwnProperty(moduleName)) {
|
||||
delete ActionAsts[moduleName];
|
||||
delete SchemaAsts[moduleName];
|
||||
delete StyleAsts[moduleName];
|
||||
// removeFromRelationShip(moduleName);
|
||||
console.warn(`出现了同名的Entity定义「${moduleName}」,将使用${fullPath}取代掉默认对象,请检查新的对象结构及相关常量定义与原有的兼容,否则原有对象的相关逻辑会出现不可知异常`);
|
||||
}
|
||||
|
|
@ -1203,13 +1222,13 @@ function analyzeEntity(filename: string, path: string, program: ts.Program, rela
|
|||
assert(node.type.literal.text.length < STRING_LITERAL_MAX_LENGTH, `Relation定义的字符串长度不长于${STRING_LITERAL_MAX_LENGTH}(${filename},${node.type.literal.text})`);
|
||||
relationValues.push(node.type.literal.text);
|
||||
}
|
||||
else if (ts.isTypeReferenceNode(node.type)){
|
||||
else if (ts.isTypeReferenceNode(node.type)) {
|
||||
const relationStrings = tryGetStringLiteralValues(moduleName, filename, 'relation', node.type, program);
|
||||
assert(relationStrings.length > 0);
|
||||
relationValues.push(...relationStrings);
|
||||
}
|
||||
else {
|
||||
assert (ts.isUnionTypeNode(node.type), `Relation的定义只能是string类型,或者string union类型,或者两者的union(${filename})`);
|
||||
assert(ts.isUnionTypeNode(node.type), `Relation的定义只能是string类型,或者string union类型,或者两者的union(${filename})`);
|
||||
node.type.types.forEach(
|
||||
(ele) => {
|
||||
if (ts.isLiteralTypeNode(ele)) {
|
||||
|
|
@ -1218,7 +1237,7 @@ function analyzeEntity(filename: string, path: string, program: ts.Program, rela
|
|||
relationValues.push(ele.literal.text);
|
||||
}
|
||||
else {
|
||||
assert (ts.isTypeReferenceNode(ele), `Relation的定义只能是string类型,或者string union类型,或者两者的union(${filename})`);
|
||||
assert(ts.isTypeReferenceNode(ele), `Relation的定义只能是string类型,或者string union类型,或者两者的union(${filename})`);
|
||||
const relationStrings = tryGetStringLiteralValues(moduleName, filename, 'relation', ele, program);
|
||||
assert(relationStrings.length > 0);
|
||||
relationValues.push(...relationStrings);
|
||||
|
|
@ -1513,6 +1532,9 @@ function analyzeEntity(filename: string, path: string, program: ts.Program, rela
|
|||
_static = true; // static如果有值只能为true
|
||||
}
|
||||
};
|
||||
const dealWithStyleDesc = (declaration: ts.ObjectLiteralExpression) => {
|
||||
StyleAsts[moduleName] = declaration;
|
||||
}
|
||||
const dealWithEntityDesc = (declaration: ts.Expression) => {
|
||||
if (ts.isObjectLiteralExpression(declaration)) {
|
||||
const { properties } = declaration;
|
||||
|
|
@ -1538,6 +1560,14 @@ function analyzeEntity(filename: string, path: string, program: ts.Program, rela
|
|||
assert(ts.isPropertyAssignment(configurationProperty));
|
||||
dealWithConfiguration(configurationProperty.initializer as ts.ObjectLiteralExpression);
|
||||
}
|
||||
|
||||
const styleDescProperty = properties.find(
|
||||
ele => ts.isPropertyAssignment(ele) && ts.isIdentifier(ele.name) && ele.name.text === 'style'
|
||||
);
|
||||
if (styleDescProperty) {
|
||||
assert(ts.isPropertyAssignment(styleDescProperty!));
|
||||
dealWithStyleDesc(styleDescProperty.initializer as ts.ObjectLiteralExpression);
|
||||
}
|
||||
}
|
||||
else if (ts.isIdentifier(declaration)) {
|
||||
const checker = program.getTypeChecker();
|
||||
|
|
@ -1549,9 +1579,6 @@ function analyzeEntity(filename: string, path: string, program: ts.Program, rela
|
|||
*/
|
||||
|
||||
assert(false, '用变量赋值给entityDesc暂时还解析不了');
|
||||
|
||||
|
||||
console.log(original);
|
||||
}
|
||||
};
|
||||
declarations.forEach(
|
||||
|
|
@ -5765,7 +5792,7 @@ function outputSchema(outputDir: string, printer: ts.Printer) {
|
|||
|
||||
function outputAction(outputDir: string, printer: ts.Printer) {
|
||||
const actionDictStatements: ts.Statement[] = [];
|
||||
const propertyAssignments: ts.PropertyAssignment[] = [];
|
||||
const propertyAssignments: ts.ShorthandPropertyAssignment[] = [];
|
||||
for (const entity in ActionAsts) {
|
||||
const { sourceFile, statements, importActionFrom, importStateFrom, importActionDefFrom, actionDefNames } = ActionAsts[entity];
|
||||
const importStatements: ts.Statement[] = [];
|
||||
|
|
@ -5833,7 +5860,7 @@ function outputAction(outputDir: string, printer: ts.Printer) {
|
|||
[factory.createModifier(ts.SyntaxKind.ExportKeyword)],
|
||||
factory.createVariableDeclarationList(
|
||||
[factory.createVariableDeclaration(
|
||||
factory.createIdentifier("ActionDefDict"),
|
||||
factory.createIdentifier("actionDefDict"),
|
||||
undefined,
|
||||
undefined,
|
||||
factory.createObjectLiteralExpression(
|
||||
|
|
@ -5873,17 +5900,16 @@ function outputAction(outputDir: string, printer: ts.Printer) {
|
|||
undefined,
|
||||
factory.createNamedImports([factory.createImportSpecifier(
|
||||
false,
|
||||
factory.createIdentifier("ActionDefDict"),
|
||||
factory.createIdentifier(entity)
|
||||
factory.createIdentifier("actionDefDict"),
|
||||
factory.createIdentifier(firstLetterLowerCase(entity))
|
||||
)])
|
||||
),
|
||||
factory.createStringLiteral(`./${entity}/Action`)
|
||||
)
|
||||
);
|
||||
propertyAssignments.push(
|
||||
factory.createPropertyAssignment(
|
||||
factory.createShorthandPropertyAssignment(
|
||||
factory.createIdentifier(firstLetterLowerCase(entity)),
|
||||
factory.createIdentifier(entity)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
|
@ -5893,7 +5919,7 @@ function outputAction(outputDir: string, printer: ts.Printer) {
|
|||
[factory.createModifier(ts.SyntaxKind.ExportKeyword)],
|
||||
factory.createVariableDeclarationList(
|
||||
[factory.createVariableDeclaration(
|
||||
factory.createIdentifier("ActionDefDict"),
|
||||
factory.createIdentifier("actionDefDict"),
|
||||
undefined,
|
||||
undefined,
|
||||
factory.createObjectLiteralExpression(
|
||||
|
|
@ -6738,6 +6764,7 @@ function outputIndexTs(outputDir: string) {
|
|||
export * from './Storage';
|
||||
export * from './ActionDefDict';
|
||||
export * from './Relation';
|
||||
export * from './StyleDict';
|
||||
`;
|
||||
const filename = PathLib.join(outputDir, 'index.ts');
|
||||
writeFileSync(filename, indexTs, { flag: 'w' });
|
||||
|
|
@ -7398,6 +7425,168 @@ function outputRelation2(outputDir: string, printer: ts.Printer) {
|
|||
writeFileSync(filename, result, { flag: 'w' });
|
||||
}
|
||||
|
||||
/**
 * Emit the StyleDict.ts file of oak-app-domain
 * @param outputDir
 * @param printer
 */
function outputStyleDict(outputDir: string, printer: ts.Printer) {
|
||||
for (const entity in StyleAsts) {
|
||||
const stmts: ts.Statement[] = [
|
||||
factory.createImportDeclaration(
|
||||
undefined,
|
||||
factory.createImportClause(
|
||||
false,
|
||||
undefined,
|
||||
factory.createNamedImports([
|
||||
factory.createImportSpecifier(
|
||||
false,
|
||||
undefined,
|
||||
factory.createIdentifier("EntityDef")
|
||||
)
|
||||
])
|
||||
),
|
||||
factory.createStringLiteral("./Schema"),
|
||||
undefined
|
||||
),
|
||||
factory.createImportDeclaration(
|
||||
undefined,
|
||||
factory.createImportClause(
|
||||
false,
|
||||
undefined,
|
||||
factory.createNamedImports([factory.createImportSpecifier(
|
||||
false,
|
||||
undefined,
|
||||
factory.createIdentifier("StyleDef")
|
||||
)])
|
||||
),
|
||||
factory.createStringLiteral(`${TYPE_PATH_IN_OAK_DOMAIN()}Style`),
|
||||
undefined
|
||||
),
|
||||
factory.createVariableStatement(
|
||||
[factory.createToken(ts.SyntaxKind.ExportKeyword)],
|
||||
factory.createVariableDeclarationList(
|
||||
[factory.createVariableDeclaration(
|
||||
factory.createIdentifier("style"),
|
||||
undefined,
|
||||
factory.createTypeReferenceNode(
|
||||
factory.createIdentifier("StyleDef"),
|
||||
[
|
||||
factory.createIndexedAccessTypeNode(
|
||||
factory.createTypeReferenceNode(
|
||||
factory.createIdentifier("EntityDef"),
|
||||
undefined
|
||||
),
|
||||
factory.createLiteralTypeNode(factory.createStringLiteral("OpSchema"))
|
||||
),
|
||||
factory.createIndexedAccessTypeNode(
|
||||
factory.createTypeReferenceNode(
|
||||
factory.createIdentifier("EntityDef"),
|
||||
undefined
|
||||
),
|
||||
factory.createLiteralTypeNode(factory.createStringLiteral("Action"))
|
||||
)
|
||||
]
|
||||
),
|
||||
StyleAsts[entity]
|
||||
)],
|
||||
ts.NodeFlags.Const
|
||||
)
|
||||
)
|
||||
];
|
||||
const { sourceFile } = Schema[entity];
|
||||
|
||||
const result = printer.printList(
|
||||
ts.ListFormat.SourceFileStatements,
|
||||
factory.createNodeArray(stmts),
|
||||
sourceFile);
|
||||
const filename = PathLib.join(outputDir, entity, 'Style.ts');
|
||||
writeFileSync(filename, result, { flag: 'w' });
|
||||
}
|
||||
|
||||
const stmts: ts.Statement[] = [
|
||||
factory.createImportDeclaration(
|
||||
undefined,
|
||||
factory.createImportClause(
|
||||
false,
|
||||
undefined,
|
||||
factory.createNamedImports([
|
||||
factory.createImportSpecifier(
|
||||
false,
|
||||
undefined,
|
||||
factory.createIdentifier("EntityDict")
|
||||
)
|
||||
])
|
||||
),
|
||||
factory.createStringLiteral("./EntityDict"),
|
||||
undefined
|
||||
),
|
||||
factory.createImportDeclaration(
|
||||
undefined,
|
||||
factory.createImportClause(
|
||||
false,
|
||||
undefined,
|
||||
factory.createNamedImports([factory.createImportSpecifier(
|
||||
false,
|
||||
undefined,
|
||||
factory.createIdentifier("StyleDict")
|
||||
)])
|
||||
),
|
||||
factory.createStringLiteral(`${TYPE_PATH_IN_OAK_DOMAIN(1)}Style`),
|
||||
undefined
|
||||
),
|
||||
...Object.keys(StyleAsts).map(
|
||||
(entity) => factory.createImportDeclaration(
|
||||
undefined,
|
||||
factory.createImportClause(
|
||||
false,
|
||||
undefined,
|
||||
factory.createNamedImports([factory.createImportSpecifier(
|
||||
false,
|
||||
factory.createIdentifier("style"),
|
||||
factory.createIdentifier(firstLetterLowerCase(entity))
|
||||
)])
|
||||
),
|
||||
factory.createStringLiteral(`./${entity}/Style`),
|
||||
undefined
|
||||
)
|
||||
),
|
||||
factory.createVariableStatement(
|
||||
[factory.createToken(ts.SyntaxKind.ExportKeyword)],
|
||||
factory.createVariableDeclarationList(
|
||||
[factory.createVariableDeclaration(
|
||||
factory.createIdentifier("styleDict"),
|
||||
undefined,
|
||||
factory.createTypeReferenceNode(
|
||||
factory.createIdentifier("StyleDict"),
|
||||
[factory.createTypeReferenceNode(
|
||||
factory.createIdentifier("EntityDict"),
|
||||
undefined
|
||||
)]
|
||||
),
|
||||
factory.createObjectLiteralExpression(
|
||||
Object.keys(StyleAsts).map(
|
||||
(entity) => factory.createShorthandPropertyAssignment(
|
||||
factory.createIdentifier(firstLetterLowerCase(entity)),
|
||||
undefined
|
||||
)
|
||||
),
|
||||
true
|
||||
)
|
||||
)],
|
||||
ts.NodeFlags.Const
|
||||
)
|
||||
)
|
||||
];
|
||||
|
||||
const result = printer.printList(
|
||||
ts.ListFormat.SourceFileStatements,
|
||||
factory.createNodeArray(stmts),
|
||||
ts.createSourceFile("someFileName.ts", "", ts.ScriptTarget.Latest, /*setParentNodes*/ false, ts.ScriptKind.TS));
|
||||
const filename = PathLib.join(outputDir, 'StyleDict.ts');
|
||||
writeFileSync(filename, result, { flag: 'w' });
|
||||
}
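For orientation, the StyleDict.ts that outputStyleDict emits ends up looking roughly like this, assuming a single entity Modi with a style definition (the entity name and the resolved oak-domain type path are illustrative):

import { EntityDict } from './EntityDict';
import { StyleDict } from 'oak-domain/lib/types/Style';   // what TYPE_PATH_IN_OAK_DOMAIN(1) resolves to is assumed here
import { style as modi } from './Modi/Style';
export const styleDict: StyleDict<EntityDict> = {
    modi,
};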
|
||||
|
||||
export function analyzeEntities(inputDir: string, relativePath?: string) {
|
||||
const files = readdirSync(inputDir);
|
||||
const fullFilenames = files.map(
|
||||
|
|
@ -7435,6 +7624,7 @@ export function buildSchema(outputDir: string): void {
|
|||
outputEntityDict(outputDir, printer);
|
||||
outputStorage(outputDir, printer);
|
||||
outputRelation2(outputDir, printer);
|
||||
outputStyleDict(outputDir, printer);
|
||||
outputIndexTs(outputDir);
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -66,4 +66,17 @@ const entityDesc: EntityDesc<Schema, Action, '', {
|
|||
],
|
||||
},
|
||||
],
|
||||
style: {
|
||||
icon: {
|
||||
apply: '',
|
||||
abandon: '',
|
||||
},
|
||||
color: {
|
||||
iState: {
|
||||
active: '#0000FF',
|
||||
applied: '#008000',
|
||||
abandoned: '#A9A9A9',
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@ -45,5 +45,16 @@ export const entityDesc: EntityDesc<Schema, Action, '', {
|
|||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
style: {
|
||||
icon: {
|
||||
mergeTo: '',
|
||||
},
|
||||
color: {
|
||||
userState: {
|
||||
normal: '#112233',
|
||||
merged: '#223344',
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@ -1,12 +1,13 @@
|
|||
import { ActionDictOfEntityDict, Checker, EntityDict, StorageSchema, RowChecker, OakUniqueViolationException, CHECKER_MAX_PRIORITY, AttrUpdateMatrix, LogicalChecker, OakAttrCantUpdateException } from "../types";
|
||||
import { ActionDefDict, Checker, EntityDict, StorageSchema, RowChecker, OakUniqueViolationException, CHECKER_MAX_PRIORITY, AttrUpdateMatrix, LogicalChecker, OakAttrCantUpdateException } from "../types";
|
||||
import { SyncContext } from "./SyncRowStore";
|
||||
import { AsyncContext } from "./AsyncRowStore";
|
||||
import { pick, intersection, difference } from '../utils/lodash';
|
||||
import { checkFilterContains, combineFilters } from "./filter";
|
||||
import { pick, intersection, difference, omit } from '../utils/lodash';
|
||||
import { analyzeFilterRelation, checkFilterContains, combineFilters, contains } from "./filter";
|
||||
import { EntityDict as BaseEntityDict } from '../base-app-domain/EntityDict';
|
||||
import { createModiRelatedCheckers } from "./modi";
|
||||
import { createCreateCheckers, createRemoveCheckers } from "./checker";
|
||||
import { readOnlyActions } from "../actions/action";
|
||||
import assert from 'assert';
|
||||
|
||||
|
||||
function checkUniqueBetweenRows(rows: Record<string, any>[], uniqAttrs: string[]) {
|
||||
|
|
@ -202,7 +203,7 @@ function createUniqueCheckers<ED extends EntityDict & BaseEntityDict, Cxt extend
|
|||
}
|
||||
|
||||
function createActionTransformerCheckers<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(
|
||||
actionDefDict: ActionDictOfEntityDict<ED>
|
||||
actionDefDict: ActionDefDict<ED>
|
||||
) {
|
||||
const checkers: Array<Checker<ED, keyof ED, Cxt | FrontCxt>> = [];
|
||||
for (const entity in actionDefDict) {
|
||||
|
|
@ -229,21 +230,23 @@ function createActionTransformerCheckers<ED extends EntityDict & BaseEntityDict,
|
|||
// Modifying data inside this kind of checker is not ideal; keep it this way for now
|
||||
checkers.push({
|
||||
action: action as any,
|
||||
type: 'logicalData',
|
||||
type: 'logical',
|
||||
entity,
|
||||
checker: (operation) => {
|
||||
const { data } = operation;
|
||||
if (data instanceof Array) {
|
||||
data.forEach(
|
||||
(d) => Object.assign(d, {
|
||||
if (data) {
|
||||
if (data instanceof Array) {
|
||||
data.forEach(
|
||||
(d) => Object.assign(d, {
|
||||
[attr]: stm[action][1],
|
||||
})
|
||||
);
|
||||
}
|
||||
else {
|
||||
Object.assign(data, {
|
||||
[attr]: stm[action][1],
|
||||
})
|
||||
);
|
||||
}
|
||||
else {
|
||||
Object.assign(data, {
|
||||
[attr]: stm[action][1],
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
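In effect, the checker above writes the state machine's target state into the operation data; a minimal sketch, assuming an iState ActionDef whose stm maps apply to ['active', 'applied'] (attribute and state names are illustrative):

const stm: Record<string, [string, string]> = { apply: ['active', 'applied'] };
const operation = { action: 'apply', data: {} as Record<string, string> };
// what the checker does for a single-object data; arrays of data are handled element by element
Object.assign(operation.data, { iState: stm[operation.action][1] });
// operation.data is now { iState: 'applied' }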
|
||||
|
|
@ -252,7 +255,7 @@ function createActionTransformerCheckers<ED extends EntityDict & BaseEntityDict,
|
|||
if (is) {
|
||||
checkers.push({
|
||||
action: 'create' as ED[keyof ED]['Action'],
|
||||
type: 'logicalData',
|
||||
type: 'logical',
|
||||
entity,
|
||||
priority: 10, // priority must be high so this runs before the real data checks
|
||||
checker: (operation) => {
|
||||
|
|
@ -268,7 +271,7 @@ function createActionTransformerCheckers<ED extends EntityDict & BaseEntityDict,
|
|||
}
|
||||
);
|
||||
}
|
||||
else {
|
||||
else if (data) {
|
||||
if (!(data as Readonly<ED[keyof ED]['CreateSingle']['data']>)[attr]) {
|
||||
Object.assign(data, {
|
||||
[attr]: is,
|
||||
|
|
@ -284,6 +287,82 @@ function createActionTransformerCheckers<ED extends EntityDict & BaseEntityDict,
|
|||
return checkers;
|
||||
}
|
||||
|
||||
/**
 * Check whether an update may still pass its cascading attribute constraints.
 * For example, suppose updating A requires B = 1 and the row's current B is not 1, but the update data is { B: 1, A: ... };
 * in that case, if the update of B can succeed, the update of A can succeed as well.
 * @param entity
 * @param data
 * @param filter
 * @param context
 */
function cascadelyCheckUpdateFilters<ED extends EntityDict & BaseEntityDict, T extends keyof ED, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(
|
||||
entity: T,
|
||||
schema: StorageSchema<ED>,
|
||||
data: ED[T]['Update']['data'],
|
||||
filter: ED[T]['Update']['filter'],
|
||||
matrix: NonNullable<AttrUpdateMatrix<ED>[T]>,
|
||||
restAttrs: string[],
|
||||
context: Cxt | FrontCxt
|
||||
): void | Promise<void> {
|
||||
const successAttrs = difference(Object.keys(data), restAttrs);
|
||||
const successAttrFilter = pick(data, successAttrs);
|
||||
/**
 * First find the attributes whose update can succeed directly
 */
|
||||
const legalAttrResult = restAttrs.map(
|
||||
(attr) => {
|
||||
const { filter: f } = matrix[attr]!;
|
||||
if (!f) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// now check whether, after applying the successfully updated attributes, part of the conditions in f can be eliminated
|
||||
const result = analyzeFilterRelation(entity, schema, successAttrFilter, f, true);
|
||||
if (typeof result === 'boolean') {
|
||||
return result;
|
||||
}
|
||||
const { sureAttributes } = result;
|
||||
const f2 = omit(f, sureAttributes);
|
||||
|
||||
return checkFilterContains<ED, keyof ED, Cxt | FrontCxt>(entity, context, f2, filter, true);
|
||||
}
|
||||
);
|
||||
|
||||
const checkResult1 = (lar: boolean[]) => {
|
||||
const legalAttrs: string[] = [];
|
||||
const illegalAttrs: string[] = [];
|
||||
|
||||
|
||||
assert(lar.length === restAttrs.length);
|
||||
lar.forEach(
|
||||
(ele, idx) => {
|
||||
if (ele) {
|
||||
legalAttrs.push(restAttrs[idx]);
|
||||
}
|
||||
else {
|
||||
illegalAttrs.push(restAttrs[idx]);
|
||||
}
|
||||
}
|
||||
);
|
||||
if (illegalAttrs.length === 0) {
|
||||
return;
|
||||
}
|
||||
if (legalAttrs.length === 0) {
|
||||
throw new OakAttrCantUpdateException(entity as keyof ED, illegalAttrs, '更新的行当前属性不满足约束,请仔细检查数据');
|
||||
}
|
||||
return cascadelyCheckUpdateFilters(entity, schema, data, filter, matrix, illegalAttrs, context);
|
||||
};
|
||||
|
||||
if (legalAttrResult.find(ele => ele instanceof Promise)) {
|
||||
return Promise.all(legalAttrResult).then(
|
||||
(lar) => checkResult1(lar)
|
||||
);
|
||||
}
|
||||
|
||||
return checkResult1(legalAttrResult as boolean[]);
|
||||
}
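A concrete reading of the cascade check, under an assumed attrUpdateMatrix in which attribute a may only be updated while { b: 1 } already holds (all names and values illustrative):

const matrix = { a: { filter: { b: 1 } }, b: {} };
const update = { data: { a: 2, b: 1 }, filter: { id: 'x' } };
// on a row where b !== 1, the plain checkFilterContains fails for attribute a, but
// cascadelyCheckUpdateFilters notices that b is itself updated to 1 by the same operation,
// so the constraint on a is satisfied and no OakAttrCantUpdateException is thrown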
|
||||
|
||||
function createAttrUpdateCheckers<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(
|
||||
schema: StorageSchema<ED>,
|
||||
attrUpdateMatrix: AttrUpdateMatrix<ED>
|
||||
|
|
@ -321,21 +400,31 @@ function createAttrUpdateCheckers<ED extends EntityDict & BaseEntityDict, Cxt ex
|
|||
const attrsIllegal = attrs.filter(
|
||||
(attr) => matrix[attr]?.actions && !matrix[attr]?.actions?.includes(action!)
|
||||
);
|
||||
throw new OakAttrCantUpdateException(entity, attrsIllegal, `${attrsIllegal}不允许被${action}动作更新`);
|
||||
throw new OakAttrCantUpdateException(entity, attrsIllegal, `${attrsIllegal.join(',')}不允许被${action}动作更新`);
|
||||
}
|
||||
}
|
||||
if (f) {
|
||||
const result = checkFilterContains<ED, keyof ED, Cxt>(entity, context as any, f, filter, true);
|
||||
const rr = contains<ED, keyof ED>(entity, context.getSchema(), data, f);
|
||||
console.log(rr);
|
||||
const result = checkFilterContains<ED, keyof ED, Cxt | FrontCxt>(entity, context, f, filter, true);
|
||||
if (result instanceof Promise) {
|
||||
return result.then(
|
||||
(v) => {
|
||||
if (!v) {
|
||||
if (attrs.length > 1) {
|
||||
return cascadelyCheckUpdateFilters(entity, schema, data as ED[keyof ED]['Update']['data'],
|
||||
filter, matrix, attrs, context);
|
||||
}
|
||||
throw new OakAttrCantUpdateException(entity, attrs, '更新的行当前属性不满足约束,请仔细检查数据');
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
if (!result) {
|
||||
if (attrs.length > 1) {
|
||||
return cascadelyCheckUpdateFilters(entity, schema, data as ED[keyof ED]['Update']['data'],
|
||||
filter, matrix, attrs, context);
|
||||
}
|
||||
throw new OakAttrCantUpdateException(entity, attrs, '更新的行当前属性不满足约束,请仔细检查数据');
|
||||
}
|
||||
}
|
||||
|
|
@ -349,7 +438,7 @@ function createAttrUpdateCheckers<ED extends EntityDict & BaseEntityDict, Cxt ex
|
|||
|
||||
export function makeIntrinsicCheckers<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(
|
||||
schema: StorageSchema<ED>,
|
||||
actionDefDict: ActionDictOfEntityDict<ED>,
|
||||
actionDefDict: ActionDefDict<ED>,
|
||||
attrUpdateMatrix?: AttrUpdateMatrix<ED>,
|
||||
) {
|
||||
const checkers: Checker<ED, keyof ED, Cxt | FrontCxt>[] = [];
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { ActionDictOfEntityDict, BBWatcher, Checker, EntityDict, StorageSchema, Trigger, Watcher, AttrUpdateMatrix } from "../types";
|
||||
import { ActionDefDict, BBWatcher, Checker, EntityDict, StorageSchema, Trigger, Watcher, AttrUpdateMatrix } from "../types";
|
||||
import { SyncContext } from "./SyncRowStore";
|
||||
import { AsyncContext } from "./AsyncRowStore";
|
||||
import { EntityDict as BaseEntityDict } from '../base-app-domain/EntityDict';
|
||||
|
|
@ -36,9 +36,9 @@ function createExpiredWatchers<ED extends EntityDict & BaseEntityDict>(schema: S
|
|||
return watchers;
|
||||
}
|
||||
|
||||
export function makeIntrinsicCTWs<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(
|
||||
export function makeIntrinsicLogics<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(
|
||||
schema: StorageSchema<ED>,
|
||||
actionDefDict: ActionDictOfEntityDict<ED>,
|
||||
actionDefDict: ActionDefDict<ED>,
|
||||
attrUpdateMatrix?: AttrUpdateMatrix<ED>,
|
||||
) {
|
||||
const checkers: Checker<ED, keyof ED, Cxt | FrontCxt>[] = makeIntrinsicCheckers<ED, Cxt, FrontCxt>(schema, actionDefDict, attrUpdateMatrix);
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ import { AsyncContext } from './AsyncRowStore';
|
|||
import { judgeRelation } from './relation';
|
||||
import { SyncContext } from './SyncRowStore';
|
||||
|
||||
export function translateCreateDataToFilter<ED extends EntityDict & BaseEntityDict, T extends keyof ED> (
|
||||
export function translateCreateDataToFilter<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(
|
||||
schema: StorageSchema<ED>,
|
||||
entity: T,
|
||||
data: ED[T]['CreateSingle']['data'],
|
||||
|
|
@ -53,9 +53,24 @@ function addFilterSegment<ED extends EntityDict & BaseEntityDict, T extends keyo
|
|||
if (!filter[attr]) {
|
||||
filter[attr] = value;
|
||||
}
|
||||
// Only optimize one case: both are equality conditions with the same value
// Optimize the case where both are equality conditions with the same value
|
||||
else if (filter[attr] === value) {
|
||||
|
||||
}
|
||||
// the query defined by value is contained in the current query
|
||||
else if (contains(entity, schema, {
|
||||
[attr]: value,
|
||||
}, {
|
||||
[attr]: filter[attr],
|
||||
}) === true) {
|
||||
filter[attr] = value;
|
||||
}
|
||||
// the current query is contained in the query defined by value
|
||||
else if (contains(entity, schema, {
|
||||
[attr]: filter[attr],
|
||||
}, {
|
||||
[attr]: value
|
||||
}) === true) {
|
||||
}
|
||||
else {
|
||||
addIntoAnd({
|
||||
|
|
@ -1194,21 +1209,25 @@ function judgeFilterSingleAttrRelation<ED extends EntityDict & BaseEntityDict, T
|
|||
return;
|
||||
}
|
||||
|
||||
/** Judge whether the filter condition is compatible with, or mutually exclusive from, the compared condition
/**
 * Analyze, item by item, each condition of the compared query against the filter
 * @param entity
 * @param schema
 * @param filter
 * @param compared
 * @param contained: true means judging that filter contains compared (filter's result set is a subset of compared's), false means judging that filter and compared are mutually exclusive (their result sets do not intersect)
 * @returns true means definitely compatible (exclusive), false means compatibility (exclusivity) cannot be decided, a DeducedFilterCombination means the deduced conditions need further checking
 * @param compared
 * @param contained
 * @returns
 * sureAttributes contains the attributes judged definitely compatible or definitely not exclusive (no further checking needed)
 * uncertainAttributes contains the attributes whose result cannot be decided
 * totalAndDeducedFilters contains the sufficient conditions (AND-related) for compatibility deduced during the judgment
 * totalOrDeducedFilters contains the sufficient conditions (OR-related) for exclusivity deduced during the judgment
 */
|
||||
function judgeFilterRelation<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(
|
||||
export function analyzeFilterRelation<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(
|
||||
entity: T,
|
||||
schema: StorageSchema<ED>,
|
||||
filter: NonNullable<ED[T]['Selection']['filter']>,
|
||||
compared: NonNullable<ED[T]['Selection']['filter']>,
|
||||
contained: boolean): boolean | DeducedFilterCombination<ED> {
|
||||
|
||||
contained: boolean) {
|
||||
const totalAndDeducedFilters: (DeducedFilterCombination<ED> | DeducedFilter<ED, T>)[] = [];
|
||||
const totalOrDeducedFilters: (DeducedFilterCombination<ED> | DeducedFilter<ED, T>)[] = [];
|
||||
const uncertainAttributes: string[] = [];
|
||||
|
|
@ -1405,6 +1424,40 @@ function judgeFilterRelation<ED extends EntityDict & BaseEntityDict, T extends k
|
|||
}
|
||||
}
|
||||
|
||||
return {
|
||||
totalAndDeducedFilters,
|
||||
totalOrDeducedFilters,
|
||||
uncertainAttributes,
|
||||
sureAttributes,
|
||||
};
|
||||
}
|
||||
/** Judge whether the filter condition is compatible with, or mutually exclusive from, the compared condition
 * @param entity
 * @param schema
 * @param filter
 * @param compared
 * @param contained: true means judging that filter contains compared (filter's result set is a subset of compared's), false means judging that filter and compared are mutually exclusive (their result sets do not intersect)
 * @returns true means definitely compatible (exclusive), false means compatibility (exclusivity) cannot be decided, a DeducedFilterCombination means the deduced conditions need further checking
 */
|
||||
function judgeFilterRelation<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(
|
||||
entity: T,
|
||||
schema: StorageSchema<ED>,
|
||||
filter: NonNullable<ED[T]['Selection']['filter']>,
|
||||
compared: NonNullable<ED[T]['Selection']['filter']>,
|
||||
contained: boolean): boolean | DeducedFilterCombination<ED> {
|
||||
|
||||
const result = analyzeFilterRelation(entity, schema, filter, compared, contained);
|
||||
if (typeof result === 'boolean') {
|
||||
return result;
|
||||
}
|
||||
|
||||
const {
|
||||
sureAttributes,
|
||||
uncertainAttributes,
|
||||
totalAndDeducedFilters,
|
||||
totalOrDeducedFilters,
|
||||
} = result;
|
||||
|
||||
if (contained) {
|
||||
if (sureAttributes.length === Object.keys(compared).length) {
|
||||
return true;
|
||||
|
|
@ -1457,7 +1510,7 @@ function judgeFilterRelation<ED extends EntityDict & BaseEntityDict, T extends k
|
|||
* @param contained
|
||||
* @returns
|
||||
*/
|
||||
function contains<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(
|
||||
export function contains<ED extends EntityDict & BaseEntityDict, T extends keyof ED>(
|
||||
entity: T,
|
||||
schema: StorageSchema<ED>,
|
||||
filter: ED[T]['Selection']['filter'],
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ export type ActionDef<A extends Action, S extends State> = {
|
|||
is?: S,
|
||||
};
|
||||
|
||||
export type ActionDictOfEntityDict<E extends EntityDict> = {
|
||||
export type ActionDefDict<E extends EntityDict> = {
|
||||
[T in keyof E]?: {
|
||||
[A in keyof E[T]['OpSchema']]?: ActionDef<string, string>;
|
||||
};
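A short sketch of an ActionDefDict value under the renamed type, assuming an entity dict ED with a modi entity whose iState attribute is a small state machine; the [source state, target state] tuple shape of the stm entries is inferred from how stm[action][1] is used elsewhere in this commit:

const actionDefDict: ActionDefDict<ED> = {
    modi: {
        iState: {
            stm: {
                apply: ['active', 'applied'],
                abandon: ['applied', 'abandoned'],
            },
            is: 'active',
        },
    },
};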
|
||||
|
|
|
|||
|
|
@ -1,29 +1,37 @@
|
|||
// Normalize all project configuration into one place (unfinished) by Xc 20240207
|
||||
import { AuthDeduceRelationMap, EntityDict } from './Entity';
|
||||
import { EntityDict as BaseEntityDict } from "../base-app-domain";
|
||||
import { AsyncContext } from '../store/AsyncRowStore';
|
||||
import { SyncConfig } from "./Sync";
|
||||
import { AttrUpdateMatrix } from './EntityDesc';
|
||||
import { ActionDefDict } from './Action';
|
||||
import { StyleDict } from './Style';
|
||||
import { Exportation, Importation } from './Port';
|
||||
|
||||
/**
 * Back-end environment configuration
 * Back-end configuration
 */
|
||||
export type ServerConfiguration = {
|
||||
export type ServerConfiguration<ED extends BaseEntityDict & EntityDict, Cxt extends AsyncContext<ED>> = {
|
||||
database: {
|
||||
type: 'mysql',
|
||||
host: string;
|
||||
database: string;
|
||||
port: number;
|
||||
port?: number;
|
||||
user: string;
|
||||
password?: string;
|
||||
connectionLimit: number;
|
||||
charset: "utf8mb4_general_ci",
|
||||
},
|
||||
http: {
|
||||
// listening port
|
||||
port: number;
|
||||
}
|
||||
workDir: {
|
||||
path: string;
|
||||
},
|
||||
sync?: SyncConfig<ED, Cxt>;
|
||||
};
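A hedged sketch of a ServerConfiguration value satisfying the reshaped type; EntityDict and BackendRuntimeContext stand for the application's own types, and all concrete values are placeholders:

const serverConfig: ServerConfiguration<EntityDict, BackendRuntimeContext> = {
    database: {
        type: 'mysql',
        host: 'localhost',
        database: 'oak_app',
        user: 'root',
        password: 'secret',
        connectionLimit: 10,
        charset: 'utf8mb4_general_ci',
    },
    http: {
        port: 8080,
    },
    workDir: {
        path: '/var/oak/work',
    },
    // sync is optional and omitted here
};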
|
||||
|
||||
/**
 * Configuration shared by front end and back end
 * Front-end/back-end access configuration
 */
|
||||
export type ProjectConfiguration = {
|
||||
export type AccessConfiguration = {
|
||||
// route prefixes for the various interfaces (normally not recommended to configure)
|
||||
routerPrefixes?: {
|
||||
// default aspect
|
||||
|
|
@ -40,9 +48,45 @@ export type ProjectConfiguration = {
|
|||
|
||||
// default bridge
|
||||
bridge?: string;
|
||||
}
|
||||
}
|
||||
},
|
||||
http: {
|
||||
// domain name of the back end
|
||||
hostname: string;
|
||||
|
||||
// listening port
|
||||
port?: number;
|
||||
|
||||
// whether https is configured (nginx)
|
||||
ssl?: boolean;
|
||||
|
||||
// nginx proxy path
|
||||
path?: string;
|
||||
}
|
||||
};
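AccessConfiguration is what SimpleConnector consumes further down; a minimal sketch (domain, port and path are placeholders):

const accessConfig: AccessConfiguration = {
    http: {
        hostname: 'api.example.com',
        port: 443,
        ssl: true,
        path: '/oak-api',
    },
    // routerPrefixes is optional; when omitted, defaults such as SimpleConnector.ASPECT_ROUTER apply
};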
|
||||
|
||||
/**
 * Common configuration of the business logic
 */
|
||||
export type CommonConfiguration<ED extends BaseEntityDict & EntityDict> = {
|
||||
attrUpdateMatrix: AttrUpdateMatrix<ED>;
|
||||
actionDefDict: ActionDefDict<ED>;
|
||||
authDeduceRelationMap: AuthDeduceRelationMap<ED>;
|
||||
selectFreeEntities?: (keyof ED)[];
|
||||
updateFreeDict?: {
|
||||
[A in keyof ED]?: string[];
|
||||
};
|
||||
cacheSavedEntities?: (keyof ED)[];
|
||||
cacheKeepFreshPeriod?: number;
|
||||
};
|
||||
|
||||
export type DependencyConfiguration = string[];
|
||||
|
||||
/**
 * Rendering-related definitions
 */
|
||||
export type RenderConfiguration<ED extends BaseEntityDict & EntityDict> = {
|
||||
styleDict: StyleDict<ED>;
|
||||
};
|
||||
|
||||
/**
 * Build environment configuration
|
||||
|
|
|
|||
|
|
@ -61,4 +61,9 @@ export interface Connector<ED extends EntityDict, FrontCxt extends SyncContext<E
|
|||
url: string;
|
||||
headers?: Record<string, string>;
|
||||
};
|
||||
|
||||
// fetch all data (for test environments)
|
||||
getFullData: (keys?: (keyof ED)[]) => Promise<{
|
||||
[T in keyof ED]?: ED[T]['OpSchema'][];
|
||||
}>
|
||||
}
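The new getFullData member on Connector is a test-environment escape hatch; a hedged usage sketch (entity names illustrative — note that SimpleConnector's own implementation further down just logs an error and returns an empty object in front/back mode):

const snapshot = await connector.getFullData(['user', 'modi']);
// with a connector implementation that supports it, snapshot is
// { user: [ ...OpSchema rows ], modi: [ ... ] }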
|
||||
|
|
@ -1,17 +1,26 @@
|
|||
import { LocaleDef } from './Locale';
|
||||
import { Index } from './Storage';
|
||||
import { EntityShape, Configuration, EntityDict } from './Entity';
|
||||
import { StyleDesc } from './Style';
|
||||
|
||||
export type EntityDesc<
|
||||
Schema extends EntityShape,
|
||||
Action extends string = '',
|
||||
Relation extends string = '',
|
||||
V extends Record<string, string> = {}> = {
|
||||
locales: LocaleDef<Schema, Action, Relation, V>;
|
||||
V extends Record<string, string> = { ['##oak_illegal##']: '' }> = {
|
||||
locales: LocaleDef<Schema, Action, Relation, keyof V extends '##oak_illegal##' ? {} : V>;
|
||||
indexes?: Index<Schema>[];
|
||||
configuration?: Configuration;
|
||||
recursiveDepth?: number;
|
||||
};
|
||||
} & (
|
||||
Action extends '' ? (
|
||||
keyof V extends '##oak_illegal##' ? {} : {
|
||||
style: StyleDesc<Action, V>;
|
||||
}
|
||||
) : {
|
||||
style: StyleDesc<Action, V>;
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
// Define update constraints for an entity: which attributes may be updated (via which actions) in which states
|
||||
|
|
|
|||
|
|
@ -1,12 +1,45 @@
|
|||
import { EntityDict } from './Entity';
|
||||
import { EntityDict, GeneralEntityShape } from './Entity';
|
||||
import { EntityDict as BaseEntityDict } from '../base-app-domain';
|
||||
|
||||
type ThemeColor = 'default' | 'success' | 'warning' | 'error' | 'primary' | 'danger';
|
||||
type Color = `#${string}`;
|
||||
type IconName = string;
|
||||
|
||||
export type ColorDict<ED extends BaseEntityDict & EntityDict> = {
|
||||
[T in keyof ED]?: {
|
||||
[A in keyof ED[T]['OpSchema']]?: {
|
||||
[E in ED[T]['OpSchema'][A]]?: ThemeColor | `#${string}`;
|
||||
export type StyleDesc<Action extends string = '', V extends Record<string, string> = { ['##oak_illegal##']: '' }> = Action extends '' ? (
|
||||
keyof V extends '##oak_illegal##' ? {} : {
|
||||
color: {
|
||||
[A in keyof V]: {
|
||||
[E in V[A]]: Color;
|
||||
};
|
||||
};
|
||||
}
|
||||
) : (
|
||||
keyof V extends '##oak_illegal##' ? {
|
||||
icon: {
|
||||
[A in Action]?: IconName;
|
||||
};
|
||||
} : {
|
||||
icon: {
|
||||
[A in Action]?: IconName;
|
||||
};
|
||||
color: {
|
||||
[A in keyof V]: {
|
||||
[E in V[A]]: Color;
|
||||
};
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
export type StyleDef<ED extends GeneralEntityShape, Action extends string> = {
|
||||
color?: {
|
||||
[A in keyof ED]?: {
|
||||
[E in ED[A]]?: Color;
|
||||
};
|
||||
};
|
||||
icon?: {
|
||||
[A in Action]?: IconName;
|
||||
};
|
||||
};
|
||||
|
||||
export type StyleDict<ED extends BaseEntityDict & EntityDict> = {
|
||||
[T in keyof ED]?: StyleDef<ED[T]['OpSchema'], ED[T]['Action']>;
|
||||
};
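A hedged sketch of a per-entity StyleDef value of the shape just defined, mirroring the user entity's style block further down in this commit (the ./Schema and oak-domain import paths are assumptions):

import { EntityDef } from './Schema';
import { StyleDef } from 'oak-domain/lib/types/Style';  // path illustrative

export const style: StyleDef<EntityDef['OpSchema'], EntityDef['Action']> = {
    icon: {
        mergeTo: '',
    },
    color: {
        userState: {
            normal: '#112233',
            merged: '#223344',
        },
    },
};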
|
||||
|
|
|
|||
|
|
@ -4,15 +4,9 @@ import { Stream } from 'stream';
|
|||
import URL from 'url';
|
||||
import { SyncContext } from '../store/SyncRowStore';
|
||||
import { Connector, EntityDict, OakException, OakNetworkException, OakServerProxyException, OpRecord } from "../types";
|
||||
import { AccessConfiguration } from '../types/Configuration';
|
||||
|
||||
type ServerOption = {
|
||||
protocol: string;
|
||||
hostname: string;
|
||||
port?: number;
|
||||
apiPath?: string;
|
||||
};
|
||||
|
||||
export class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext<ED>>
|
||||
export default class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext<ED>>
|
||||
implements Connector<ED, FrontCxt>
|
||||
{
|
||||
static ASPECT_ROUTER = '/aspect';
|
||||
|
|
@ -20,29 +14,38 @@ export class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext
|
|||
static SUBSCRIBE_ROUTER = process.env.OAK_SUBSCRIBE_ROUTER || '/subscribe';
|
||||
static SUBSCRIBE_POINT_ROUTER = '/subscribePoint';
|
||||
static ENDPOINT_ROUTER = '/endpoint';
|
||||
private serverUrl: string;
|
||||
private serverAspectUrl: string;
|
||||
private serverBridgeUrl: string;
|
||||
private serverSubscribePointUrl: string;
|
||||
private option: ServerOption;
|
||||
private configuration: AccessConfiguration;
|
||||
private makeException: (exceptionData: any) => OakException<ED>;
|
||||
|
||||
constructor(
|
||||
option: ServerOption,
|
||||
configuration: AccessConfiguration,
|
||||
makeException: (exceptionData: any) => OakException<ED>
|
||||
) {
|
||||
this.option = option;
|
||||
const { protocol, hostname, port, apiPath } = option;
|
||||
this.configuration = configuration;
|
||||
const { routerPrefixes, http } = configuration;
|
||||
const { ssl, hostname, port, path } = http;
|
||||
const protocol = ssl ? 'https:' : 'http:';
|
||||
let serverUrl = `${protocol}//${hostname}`;
|
||||
this.serverUrl = serverUrl;
|
||||
if (typeof port === 'number') {
|
||||
serverUrl += `:${port}`;
|
||||
}
|
||||
if (apiPath) {
|
||||
assert(apiPath.startsWith('/'), 'apiPath前缀必须存在/');
|
||||
serverUrl += apiPath;
|
||||
if (path) {
|
||||
if (path.startsWith('/')) {
|
||||
serverUrl += path;
|
||||
}
|
||||
else {
|
||||
serverUrl += `/${path}`;
|
||||
}
|
||||
}
|
||||
this.serverAspectUrl = `${serverUrl}${SimpleConnector.ASPECT_ROUTER}`;
|
||||
this.serverBridgeUrl = `${serverUrl}${SimpleConnector.BRIDGE_ROUTER}`;
|
||||
this.serverSubscribePointUrl = `${serverUrl}${SimpleConnector.SUBSCRIBE_POINT_ROUTER}`;
|
||||
this.serverAspectUrl = `${serverUrl}${routerPrefixes?.aspect || SimpleConnector.ASPECT_ROUTER}`;
|
||||
this.serverBridgeUrl = `${serverUrl}${routerPrefixes?.bridge || SimpleConnector.BRIDGE_ROUTER}`;
|
||||
this.serverSubscribePointUrl = `${serverUrl}${routerPrefixes?.getSubscribePoint ||
|
||||
SimpleConnector.SUBSCRIBE_POINT_ROUTER}`;
|
||||
this.makeException = makeException;
|
||||
}
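With the constructor now taking an AccessConfiguration instead of the old ServerOption, wiring it up looks roughly like this (accessConfig as sketched for AccessConfiguration above; makeException stands for whatever factory the application already uses to revive OakException instances):

const connector = new SimpleConnector<EntityDict, FrontendRuntimeContext>(
    accessConfig,
    makeException,
);
// getRouter() now honours routerPrefixes?.aspect, falling back to SimpleConnector.ASPECT_ROUTER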
|
||||
|
||||
|
|
@ -127,15 +130,15 @@ export class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext
|
|||
}
|
||||
|
||||
getRouter(): string {
|
||||
return SimpleConnector.ASPECT_ROUTER;
|
||||
return this.configuration.routerPrefixes?.aspect || SimpleConnector.ASPECT_ROUTER;
|
||||
}
|
||||
|
||||
getSubscribeRouter(): string {
|
||||
return SimpleConnector.SUBSCRIBE_ROUTER;
|
||||
return this.configuration.routerPrefixes?.subscribe || SimpleConnector.SUBSCRIBE_ROUTER;
|
||||
}
|
||||
|
||||
getSubscribePointRouter(): string {
|
||||
return SimpleConnector.SUBSCRIBE_POINT_ROUTER;
|
||||
return this.configuration.routerPrefixes?.getSubscribePoint || SimpleConnector.SUBSCRIBE_POINT_ROUTER;
|
||||
}
|
||||
|
||||
async getSubscribePoint() {
|
||||
|
|
@ -160,8 +163,7 @@ export class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext
|
|||
if (responseType?.toLocaleLowerCase().match(/application\/json/i)) {
|
||||
const { url, path, port, namespace } = await response.json();
|
||||
|
||||
let url2 =
|
||||
url || `${this.option.protocol}//${this.option.hostname}`;
|
||||
let url2 = url || this.serverUrl;
|
||||
assert(port);
|
||||
url2 += `:${port}`;
|
||||
if (namespace) {
|
||||
|
|
@ -178,7 +180,7 @@ export class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext
|
|||
}
|
||||
|
||||
getEndpointRouter(): string {
|
||||
return SimpleConnector.ENDPOINT_ROUTER;
|
||||
return this.configuration.routerPrefixes?.endpoint || SimpleConnector.ENDPOINT_ROUTER;
|
||||
}
|
||||
|
||||
parseRequest(headers: IncomingHttpHeaders, body?: any, files?: any) {
|
||||
|
|
@ -266,4 +268,9 @@ export class SimpleConnector<ED extends EntityDict, FrontCxt extends SyncContext
|
|||
headers: headers && JSON.parse(headers),
|
||||
};
|
||||
}
|
||||
|
||||
async getFullData() {
|
||||
console.error('前后台模式下暂时不支持此操作,请到数据库查看数据');
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import intersection from 'lodash/intersection';
|
|||
import intersectionBy from 'lodash/intersectionBy';
|
||||
import omit from 'lodash/omit';
|
||||
import merge from 'lodash/merge';
|
||||
import mergeWith from 'lodash/mergeWith';
|
||||
import cloneDeep from 'lodash/cloneDeep';
|
||||
import pick from 'lodash/pick';
|
||||
import isEqual from 'lodash/isEqual';
|
||||
|
|
@ -21,6 +22,32 @@ import differenceBy from 'lodash/differenceBy';
|
|||
import groupBy from 'lodash/groupBy';
|
||||
import unionBy from 'lodash/unionBy';
|
||||
import pullAll from 'lodash/pullAll';
|
||||
import assert from 'assert';
|
||||
|
||||
/**
 * Merge two objects; when an array is encountered, merge by concatenation
 * @param object
 * @param source
 * @returns
 */
function mergeConcatArray(object: any, source: any) {
|
||||
if (object instanceof Array) {
|
||||
assert(source instanceof Array, '合并的对象必须结构一致');
|
||||
return uniq(object.concat(source));
|
||||
}
|
||||
return mergeWith(object, source, (objValue, srcValue) => {
|
||||
if (objValue instanceof Array) {
|
||||
assert(srcValue instanceof Array, '合并的对象必须结构一致');
|
||||
return uniq(objValue.concat(srcValue));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function mergeConcatMany<T>(array: Array<T>) {
|
||||
return array.reduce(
|
||||
(prev, current) => mergeConcatArray(prev, current)
|
||||
) as T;
|
||||
}
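A quick illustration of mergeConcatArray and mergeConcatMany (values purely illustrative): plain object keys are merged recursively, arrays are concatenated and de-duplicated.

mergeConcatArray({ a: [1, 2], b: { c: 1 } }, { a: [2, 3], b: { d: 2 } });
// => { a: [1, 2, 3], b: { c: 1, d: 2 } }

mergeConcatMany([{ a: [1] }, { a: [2] }, { a: [2, 3] }]);
// => { a: [1, 2, 3] }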
|
||||
|
||||
export {
|
||||
unset,
|
||||
|
|
@ -33,6 +60,9 @@ export {
|
|||
intersectionBy,
|
||||
omit,
|
||||
merge,
|
||||
mergeWith,
|
||||
mergeConcatArray,
|
||||
mergeConcatMany,
|
||||
cloneDeep,
|
||||
pick,
|
||||
isEqual,
|
||||
|
|
|
|||
|
|
@ -0,0 +1,36 @@
|
|||
import { mergeConcatArray } from '../../utils/lodash';
|
||||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
import { Checker } from '../../types/Auth';
|
||||
import { CommonConfiguration } from '../../types/Configuration';
|
||||
import assert from 'assert';
|
||||
|
||||
/**
 * Combine the checkers and common configuration of the imported modules
 * @param modules
 * @returns
 */
export default function combineBaseModules<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED> | SyncContext<ED>>(...modules: string[]) {
|
||||
// merge the checkers/common of each module
|
||||
return modules.map(
|
||||
(module) => {
|
||||
const checkers = require(`${module}/lib/checkers`).default;
|
||||
const common = require(`${module}/lib/configuration`).default;
|
||||
assert(checkers instanceof Array, `${module}模块中的checkers不是数组`);
|
||||
assert(typeof common === 'object', `${module}模块中的common配置不是对象`);
|
||||
|
||||
return {
|
||||
checkers,
|
||||
common,
|
||||
};
|
||||
}
|
||||
).reduce((prev, current) => ({
|
||||
checkers: mergeConcatArray(prev.checkers, current.checkers),
|
||||
common: mergeConcatArray(prev.common, current.common),
|
||||
})) as {
|
||||
checkers: Array<Checker<ED, keyof ED, Cxt>>;
|
||||
common: CommonConfiguration<ED>;
|
||||
};
|
||||
}
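combineBaseModules assumes every module package exposes lib/checkers (default export: an array of checkers) and lib/configuration (default export: its CommonConfiguration). A hedged sketch of the module-side configuration file; the module name, import paths and the empty dictionaries are placeholders:

// my-module/src/configuration/index.ts (illustrative)
import { CommonConfiguration } from 'oak-domain/lib/types/Configuration';
import { EntityDict } from '../oak-app-domain';

const common: CommonConfiguration<EntityDict> = {
    attrUpdateMatrix: {},
    actionDefDict: {},
    authDeduceRelationMap: {},
};

export default common;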
|
||||
|
|
@ -0,0 +1,77 @@
|
|||
import combineBaseModules from './combine.common';
|
||||
import { intersection, merge, mergeConcatArray } from '../../utils/lodash';
|
||||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
import assert from 'assert';
|
||||
import { Aspect, Exportation, Importation, Routine, Timer, Trigger, Watcher } from '../../types';
|
||||
import { RenderConfiguration } from '../../types/Configuration';
|
||||
|
||||
|
||||
export default function combineModuleDev<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(...modules: string[]) {
|
||||
const { checkers, common } = combineBaseModules<ED, Cxt & FrontCxt>(...modules);
|
||||
|
||||
const others = modules.map(
|
||||
(module) => ({
|
||||
triggers: require(`${module}/lib/triggers`).default,
|
||||
aspectDict: require(`${module}/lib/aspects`).default,
|
||||
watchers: require(`${module}/lib/watchers`).default,
|
||||
timers: require(`${module}/lib/timers`).default,
|
||||
startRoutines: require(`${module}/lib/routines/start`).default,
|
||||
importations: require(`${module}/lib/ports`).importations,
|
||||
exportations: require(`${module}/lib/ports`).exportations,
|
||||
data: require(`${module}/lib/data`).default,
|
||||
})
|
||||
).reduce(
|
||||
(prev, current, index) => {
|
||||
const check = (module: typeof prev, name: string) => {
|
||||
assert(typeof module.aspectDict === 'object', `${name}模块中的aspectDict不是对象`);
|
||||
assert(typeof module.data === 'object', `${name}模块中的data不是对象`);
|
||||
assert(module.exportations instanceof Array, `${name}模块中的exportations不是数组`);
|
||||
assert(module.importations instanceof Array, `${name}模块中的importations不是数组`);
|
||||
assert(module.watchers instanceof Array, `${name}模块中的watchers不是数组`);
|
||||
assert(module.timers instanceof Array, `${name}模块中的timers不是数组`);
|
||||
assert(module.triggers instanceof Array, `${name}模块中的triggers不是数组`);
|
||||
assert(module.startRoutines instanceof Array, `${name}模块中的startRoutines不是数组`);
|
||||
};
|
||||
|
||||
if (index === 1) {
|
||||
check(prev, modules[0]);
|
||||
}
|
||||
check(current, modules[index]);
|
||||
// aspectDict must not contain aspects with the same name across modules
|
||||
const its = intersection(Object.keys(prev.aspectDict), Object.keys(current.aspectDict));
|
||||
if (its.length > 0) {
|
||||
throw new Error(`模块${modules[index]}的aspectDict中,存在和其它模块同步的aspect【${its.join(',')}】,请正确处理`);
|
||||
}
|
||||
return {
|
||||
aspectDict: mergeConcatArray(prev.aspectDict, current.aspectDict) ,
|
||||
data: mergeConcatArray(prev.data, current.data),
|
||||
importations: mergeConcatArray(prev.importations, current.importations),
|
||||
exportations: mergeConcatArray(prev.exportations, current.exportations),
|
||||
watchers: mergeConcatArray(prev.watchers, current.watchers),
|
||||
timers: merge(prev.timers, current.timers),
|
||||
startRoutines: merge(prev.startRoutines, current.startRoutines),
|
||||
triggers: merge(prev.triggers, current.triggers),
|
||||
};
|
||||
}
|
||||
) as {
|
||||
aspectDict: Record<string, Aspect<ED, Cxt>>;
|
||||
data: {
|
||||
[T in keyof ED]?: Array<ED[T]['OpSchema']>;
|
||||
};
|
||||
importations: Importation<ED, keyof ED, string, Cxt>;
|
||||
exportations: Exportation<ED, keyof ED, string, Cxt>;
|
||||
watchers: Watcher<ED, keyof ED, Cxt>[];
|
||||
timers: Timer<ED, keyof ED, Cxt>[];
|
||||
startRoutines: Routine<ED, keyof ED, Cxt>[];
|
||||
triggers: Trigger<ED, keyof ED, Cxt>[];
|
||||
};
|
||||
|
||||
return {
|
||||
checkers,
|
||||
common,
|
||||
...others,
|
||||
};
|
||||
}
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
import combineBaseModules from './combine.common';
|
||||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
|
||||
|
||||
export default function combineModuleDev<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(...modules: string[]) {
|
||||
return combineBaseModules<ED, Cxt & FrontCxt>(...modules);
|
||||
}
|
||||
|
|
@ -0,0 +1,76 @@
|
|||
import combineBaseModules from './combine.common';
|
||||
import { intersection, merge, mergeConcatArray } from '../../utils/lodash';
|
||||
import { AsyncContext } from "../../store/AsyncRowStore";
|
||||
import { SyncContext } from "../../store/SyncRowStore";
|
||||
import { EntityDict } from "../../types/Entity";
|
||||
import { EntityDict as BaseEntityDict } from '../../base-app-domain';
|
||||
import assert from 'assert';
|
||||
import { Aspect, Exportation, Importation, Routine, Timer, Trigger, Watcher } from '../../types';
|
||||
|
||||
|
||||
export default function combineModuleServer<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>, FrontCxt extends SyncContext<ED>>(...modules: string[]) {
|
||||
const { checkers, common } = combineBaseModules<ED, Cxt & FrontCxt>(...modules);
|
||||
|
||||
const others = modules.map(
|
||||
(module) => ({
|
||||
triggers: require(`${module}/lib/triggers`).default,
|
||||
aspectDict: require(`${module}/lib/aspects`).default,
|
||||
watchers: require(`${module}/lib/watchers`).default,
|
||||
timers: require(`${module}/lib/timers`).default,
|
||||
startRoutines: require(`${module}/lib/routines/start`).default,
|
||||
importations: require(`${module}/lib/ports`).importations,
|
||||
exportations: require(`${module}/lib/ports`).exportations,
|
||||
data: require(`${module}/lib/data`).default,
|
||||
})
|
||||
).reduce(
|
||||
(prev, current, index) => {
|
||||
const check = (module: typeof prev, name: string) => {
|
||||
assert(typeof module.aspectDict === 'object', `${name}模块中的aspectDict不是对象`);
|
||||
assert(typeof module.data === 'object', `${name}模块中的data不是对象`);
|
||||
assert(module.exportations instanceof Array, `${name}模块中的exportations不是数组`);
|
||||
assert(module.importations instanceof Array, `${name}模块中的importations不是数组`);
|
||||
assert(module.watchers instanceof Array, `${name}模块中的watchers不是数组`);
|
||||
assert(module.timers instanceof Array, `${name}模块中的timers不是数组`);
|
||||
assert(module.triggers instanceof Array, `${name}模块中的triggers不是数组`);
|
||||
assert(module.startRoutines instanceof Array, `${name}模块中的startRoutines不是数组`);
|
||||
};
|
||||
|
||||
if (index === 1) {
|
||||
check(prev, modules[0]);
|
||||
}
|
||||
check(current, modules[index]);
|
||||
// aspectDict must not contain aspects with the same name across modules
|
||||
const its = intersection(Object.keys(prev.aspectDict), Object.keys(current.aspectDict));
|
||||
if (its.length > 0) {
|
||||
throw new Error(`模块${modules[index]}的aspectDict中,存在和其它模块同步的aspect【${its.join(',')}】,请正确处理`);
|
||||
}
|
||||
return {
|
||||
aspectDict: mergeConcatArray(prev.aspectDict, current.aspectDict) ,
|
||||
data: mergeConcatArray(prev.data, current.data),
|
||||
importations: mergeConcatArray(prev.importations, current.importations),
|
||||
exportations: mergeConcatArray(prev.exportations, current.exportations),
|
||||
watchers: mergeConcatArray(prev.watchers, current.watchers),
|
||||
timers: merge(prev.timers, current.timers),
|
||||
startRoutines: merge(prev.startRoutines, current.startRoutines),
|
||||
triggers: merge(prev.triggers, current.triggers),
|
||||
};
|
||||
}
|
||||
) as {
|
||||
aspectDict: Record<string, Aspect<ED, Cxt>>;
|
||||
data: {
|
||||
[T in keyof ED]?: Array<ED[T]['OpSchema']>;
|
||||
};
|
||||
importations: Importation<ED, keyof ED, string, Cxt>;
|
||||
exportations: Exportation<ED, keyof ED, string, Cxt>;
|
||||
watchers: Watcher<ED, keyof ED, Cxt>[];
|
||||
timers: Timer<ED, keyof ED, Cxt>[];
|
||||
startRoutines: Routine<ED, keyof ED, Cxt>[];
|
||||
triggers: Trigger<ED, keyof ED, Cxt>[];
|
||||
};
|
||||
|
||||
return {
|
||||
checkers,
|
||||
common,
|
||||
...others,
|
||||
};
|
||||
}
|
||||
|
|
@ -0,0 +1 @@
|
|||
export * from './combine.dev';
|