Merge branch 'dev' of gitea.51mars.com:Oak-Team/oak-domain into dev

Xu Chang 2024-11-04 21:04:58 +08:00
commit ce4400aeef
3 changed files with 88 additions and 87 deletions


@@ -1,6 +1,6 @@
import PathLib from 'path';
import assert from 'assert';
import { writeFileSync, readdirSync, mkdirSync, existsSync } from 'fs';
import { writeFileSync, readdirSync, mkdirSync, existsSync, realpathSync } from 'fs';
import { emptydirSync } from 'fs-extra';
import { assign, cloneDeep, difference, identity, intersection, keys, pull, uniq, uniqBy, flatten, result } from 'lodash';
import * as ts from 'typescript';
@@ -286,7 +286,8 @@ function checkActionDefNameConsistent(filename: string, actionDefNode: ts.Variab
const aName = actionNode.typeName.text.slice(0, actionNode.typeName.text.length - 6);
const sName = stateNode.typeName.text.slice(0, stateNode.typeName.text.length - 5);
assert(adfName === aName && aName === sName, `The naming of ActionDef, Action and State in ActionDef ${name.text} of file ${filename} is inconsistent`);
assert(adfName === aName && aName === sName, `The naming of ActionDef, Action and State in ActionDef ${name.text} of file ${filename} is inconsistent; expected
${adfName}ActionDef, ${adfName}Action, ${adfName}State`);
}
@@ -432,7 +433,7 @@ function tryGetStringLiteralValues(
}
if (['state', 'action'].includes(obj)) {
assert(values.length > 0);
assert(values.length > 0, `${obj} ${node.typeName.getText()} in ${filename} is not defined`);
const importDeclartion = declaration.parent.parent.parent;
assert(ts.isImportDeclaration(importDeclartion));
@@ -586,7 +587,7 @@ function dealWithActionDefInitializer(moduleName: string, initializer: ts.Expres
const importDeclartion = declaration.parent.parent.parent;
addImportedFrom(moduleName, identifier.text, importDeclartion);
addImportedFrom(moduleName, identifier.text, importDeclartion as ts.ImportDeclaration);
}
else {
// locally defined actionDefs need no further handling
@@ -785,7 +786,7 @@ function analyzeImportDeclaration(
} */
}
}
/**
 * The file filename under path imports fileSpecifierPath.
 * The key point here is handling dependency declarations of the form file:
@@ -794,58 +795,58 @@ function analyzeImportDeclaration(
* @param filename
* @returns
*/
function getImportedFilePath(path: string, fileSpecifierPath: string, filename: string) {
let importedFilepath = '';
// function getImportedFilePath(path: string, fileSpecifierPath: string, filename: string) {
// let importedFilepath = '';
const getExistedFileName = () => {
if (existsSync(`${importedFilepath}.ts`)) {
return `${importedFilepath}.ts`;
}
else if (existsSync(`${importedFilepath}.d.ts`)) {
return `${importedFilepath}.d.ts`;
}
return '';
};
// const getExistedFileName = () => {
// if (existsSync(`${importedFilepath}.ts`)) {
// return `${importedFilepath}.ts`;
// }
// else if (existsSync(`${importedFilepath}.d.ts`)) {
// return `${importedFilepath}.d.ts`;
// }
// return '';
// };
if (fileSpecifierPath.startsWith('.')) {
importedFilepath = PathLib.join(path, fileSpecifierPath);
const importedFilename = getExistedFileName();
assert(importedFilename, `Cannot find a declaration for import path ${fileSpecifierPath} in ${filename}`);
return importedFilename;
}
else {
const cwd = process.cwd();
// if (fileSpecifierPath.startsWith('.')) {
// importedFilepath = PathLib.join(path, fileSpecifierPath);
// const importedFilename = getExistedFileName();
// assert(importedFilename, `Cannot find a declaration for import path ${fileSpecifierPath} in ${filename}`);
// return importedFilename;
// }
// else {
// const cwd = process.cwd();
const fileSpecifierPaths = fileSpecifierPath.split('/');
const moduleName = fileSpecifierPaths[0] || fileSpecifierPaths[1];
assert(moduleName);
// walk outward from path through package.json -> node_modules until fileSpecifier is located
const paths = path.split('/');
for (let iter = paths.length; iter >= 0; iter--) {
const paths2 = paths.slice(0, iter);
const pkgJsonPath = PathLib.join(cwd, ...paths2, 'package.json');
if (existsSync(pkgJsonPath)) {
const pkgJson = require(pkgJsonPath);
if (pkgJson.dependencies?.hasOwnProperty(moduleName)) {
const dependentPath = pkgJson.dependencies[moduleName] as string;
if (dependentPath.trimStart().startsWith('file:')) {
const dependentFilePath = dependentPath.trimStart().slice(5);
importedFilepath = PathLib.join(pkgJsonPath, '..', dependentFilePath, ...(fileSpecifierPaths[0] ? fileSpecifierPaths.slice(1) : (fileSpecifierPaths.slice(2))));
}
else {
importedFilepath = PathLib.join(pkgJsonPath, '..', 'node_modules', fileSpecifierPath);
}
const importedFilename = getExistedFileName();
if (importedFilename) {
return importedFilename;
}
}
}
}
// const fileSpecifierPaths = fileSpecifierPath.split('/');
// const moduleName = fileSpecifierPaths[0] || fileSpecifierPaths[1];
// assert(moduleName);
// // walk outward from path through package.json -> node_modules until fileSpecifier is located
// const paths = path.split('/');
// for (let iter = paths.length; iter >= 0; iter--) {
// const paths2 = paths.slice(0, iter);
// const pkgJsonPath = PathLib.join(cwd, ...paths2, 'package.json');
// if (existsSync(pkgJsonPath)) {
// const pkgJson = require(pkgJsonPath);
// if (pkgJson.dependencies?.hasOwnProperty(moduleName)) {
// const dependentPath = pkgJson.dependencies[moduleName] as string;
// if (dependentPath.trimStart().startsWith('file:')) {
// const dependentFilePath = dependentPath.trimStart().slice(5);
// importedFilepath = PathLib.join(pkgJsonPath, '..', dependentFilePath, ...(fileSpecifierPaths[0] ? fileSpecifierPaths.slice(1) : (fileSpecifierPaths.slice(2))));
// }
// else {
// importedFilepath = PathLib.join(pkgJsonPath, '..', 'node_modules', fileSpecifierPath);
// }
// const importedFilename = getExistedFileName();
// if (importedFilename) {
// return importedFilename;
// }
// }
// }
// }
}
assert(false, `Cannot find a declaration for import path ${fileSpecifierPath} in ${filename}`);
}
// }
// assert(false, `Cannot find a declaration for import path ${fileSpecifierPath} in ${filename}`);
// }
function analyzeSchemaDefinition(
node: ts.InterfaceDeclaration,
@@ -889,10 +890,27 @@ function analyzeSchemaDefinition(
const { text: from } = moduleSpecifier;
extendsFrom.push(from);
const importedFilename = getImportedFilePath(path, from, filename);
const sourceFile = program.getSourceFile(importedFilename);
assert(sourceFile, `Cannot find the corresponding sourceFile ${importedFilename} for ${filename}`);
const relativeFilename = PathLib.relative(process.cwd(), importedFilename);
// create a compiler host
const compilerHost = ts.createCompilerHost(program.getCompilerOptions());
// resolve the module
const resolvedModule = ts.resolveModuleName(
from,
filename,
program.getCompilerOptions(),
compilerHost
);
assert(resolvedModule.resolvedModule, 'Cannot find the module definition');
const resolvedFileName = resolvedModule.resolvedModule.resolvedFileName;
const sourceFile = program.getSourceFile(resolvedFileName);
// const importedFilename = getImportedFilePath(path, from, filename);
// const sourceFile = program.getSourceFile(importedFilename);
assert(sourceFile, `Cannot find the corresponding sourceFile ${resolvedFileName} for ${filename}`);
const relativeFilename = PathLib.relative(process.cwd(), resolvedFileName);
const result = analyzeReferenceSchemaFile(
moduleName,
@@ -6435,7 +6453,7 @@ function outputLocale(outputDir: string, printer: ts.Printer) {
sourceFile);
const data = Function(result)();
const filename = PathLib.join(outputDir, entity, 'locales', `${lng}.json`);
writeFileSync(filename, JSON.stringify(data), { flag: 'w' });
writeFileSync(filename, JSON.stringify(data, null, 2), { flag: 'w' });
if (locales[lng]) {
locales[lng].push(entity);


@@ -403,6 +403,7 @@ function createAttrUpdateCheckers<ED extends EntityDict & BaseEntityDict, Cxt ex
const actions = condition.map(ele => ele?.actions).filter(ele => !!ele);
const a = actions.length > 0 && intersection(actions.flat());
if (a) {
assert(action);
if (!a.includes(action)) {
// find the attr that fails the check
const attrsIllegal = attrs.filter(


@@ -378,43 +378,25 @@ export const logTriggers: Trigger<EntityDict & BaseEntityDict, 'log', AsyncConte
return (result.oper?.remove || 0) + (result2.log?.remove || 0);
}
} as CreateTriggerInTxn<EntityDict & BaseEntityDict, 'log', AsyncContext<EntityDict & BaseEntityDict>>,
// when a log is removed, remove the opers associated with it
// when a log is removed, remove the opers associated with it
{
name: 'when removing a log, remove the opers associated with it',
name: 'when removing a log, remove the opers associated with it',
entity: 'log',
action: 'remove',
when: 'before',
fn: async ({ operation }, context) => {
const { filter } = operation;
assert(filter);
// query all logs involved in this remove operation
const logs = await context.select('log', {
data: {
id: 1,
oper$log: {
$entity: 'oper',
data: {
id: 1,
},
},
},
filter,
// remove all opers associated with the logs involved in this remove operation
const result = await context.operate('oper', {
id: "dummy",
action: 'remove',
data: {},
filter: {
log: filter,
}
}, {});
const operIds = logs.flatMap(log => (log as BaseEntityDict['log']['Schema']).oper$log!.map(oper => oper.id));
let result = 0;
for (const operId of operIds) {
const result2 = await context.operate('oper', {
id: operId,
action: 'update',
data: {
logId: null,
},
}, {});
result += result2.oper?.update || 0;
}
return result;
return result.oper?.remove || 0;
}
} as RemoveTriggerInTxn<EntityDict & BaseEntityDict, 'log', AsyncContext<EntityDict & BaseEntityDict>>,
];