Added compiler support for imports
parent d7c8b55ebd
commit 041f2e8787

@@ -1,2 +1,2 @@
export declare function analyzeEntities(inputDir: string): void;
export declare function analyzeEntities(inputDir: string, relativePath?: string): void;
export declare function buildSchema(outputDir: string): void;
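Note: a minimal sketch of calling the updated entry points; the module path and all argument values below are placeholders, not taken from this commit.

// Placeholder import path - the actual compiler entry point is not shown in this diff.
import { analyzeEntities, buildSchema } from './schemaBuilder';

// relativePath is optional, so existing call sites keep working:
analyzeEntities('./src/entities');
// New call sites may pass a relativePath (placeholder value); it is threaded through
// to analyzeEntity, where it drives the rewriting of non-domain imports (see below).
analyzeEntities('./src/entities', 'oak-app-domain');
buildSchema('./src/oak-app-domain');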
@@ -150,7 +150,7 @@ function addActionSource(moduleName, name, node) {
var _a;
var ast = ActionAsts[moduleName];
var moduleSpecifier = node.moduleSpecifier;
// Currently this should only reference the common actions in oak-domain/src/actions/action
// todo: currently this should only reference the common actions in oak-domain/src/actions/action; if cross-references appear in the future this code must be fixed (it would also break if the domain itself referenced action_constants)
(0, assert_1.default)(ts.isStringLiteral(moduleSpecifier) && moduleSpecifier.text === (0, env_1.ACTION_CONSTANT_IN_OAK_DOMAIN)());
(0, lodash_1.assign)(ast.importedFrom, (_a = {},
_a[name.text] = node,
@@ -262,28 +262,27 @@ function dealWithActions(moduleName, filename, node, program, sourceFile, hasRel
});
pushStatementIntoActionAst(moduleName, factory.createVariableStatement([factory.createModifier(ts.SyntaxKind.ExportKeyword)], factory.createVariableDeclarationList([factory.createVariableDeclaration(factory.createIdentifier("actions"), undefined, undefined, factory.createArrayLiteralExpression(actionTexts.map(function (ele) { return factory.createStringLiteral(ele); }), false))], ts.NodeFlags.Const)), sourceFile);
}
function getEntityImported(declaration, filename) {
/**
* An entity reference must be written as import { Schema as XXX } from '..../XXX'
* @param declaration
* @param filename
* @returns
*/
function getEntityImported(declaration) {
var moduleSpecifier = declaration.moduleSpecifier, importClause = declaration.importClause;
var entityImported;
if (ts.isStringLiteral(moduleSpecifier)) {
if (moduleSpecifier.text.startsWith('./')) {
entityImported = moduleSpecifier.text.slice(2);
}
else if (moduleSpecifier.text.startsWith((0, env_1.ENTITY_PATH_IN_OAK_GENERAL_BUSINESS)())) {
entityImported = moduleSpecifier.text.slice((0, env_1.ENTITY_PATH_IN_OAK_GENERAL_BUSINESS)().length);
}
else if (moduleSpecifier.text.startsWith((0, env_1.ENTITY_PATH_IN_OAK_DOMAIN)())) {
entityImported = moduleSpecifier.text.slice((0, env_1.ENTITY_PATH_IN_OAK_DOMAIN)().length);
}
}
if (entityImported) {
var importedFileName_1 = path_1.default.parse(moduleSpecifier.text).name;
var namedBindings = importClause.namedBindings;
(0, assert_1.default)(ts.isNamedImports(namedBindings));
if (ts.isNamedImports(namedBindings)) {
var elements = namedBindings.elements;
(0, assert_1.default)(elements.find(function (ele) { return ts.isImportSpecifier(ele) && ele.name.text === entityImported && ele.propertyName.text === 'Schema'; }), "\u300C".concat(filename, "\u300D\u5BFC\u5165\u7684\u5BF9\u8C61\u540D\u79F0\u548C\u5BF9\u8C61\u6240\u5728\u7684\u6587\u4EF6\u540D\u79F0\u300C").concat(entityImported, "\u300D\u4E0D\u7B26"));
return entityImported;
if (elements.find(function (ele) { return ts.isImportSpecifier(ele) && ele.name.text === importedFileName_1 && ele.propertyName.text === 'Schema'; })) {
entityImported = importedFileName_1;
}
}
}
return entityImported;
}
function checkLocaleEnumAttrs(node, attrs, filename) {
var members = node.members;
var memberKeys = members.map(function (ele) {
@@ -312,7 +311,7 @@ function checkLocaleExpressionPropertyExists(root, attr, exists, filename) {
}
});
}
function analyzeEntity(filename, path, program) {
function analyzeEntity(filename, path, program, relativePath) {
var _a;
var fullPath = "".concat(path, "/").concat(filename);
var sourceFile = program.getSourceFile(fullPath);
@@ -340,10 +339,21 @@ function analyzeEntity(filename, path, program) {
ts.forEachChild(sourceFile, function (node) {
var _a, _b, _c, _d;
if (ts.isImportDeclaration(node)) {
var entityImported = getEntityImported(node, filename);
var entityImported = getEntityImported(node);
if (entityImported) {
referencedSchemas.push(entityImported);
}
else if (!process.env.COMPLING_IN_DOMAIN) {
/** Definition types imported from outside the domain must be copied into the generated Schema file.
* Note: 1. We assume the entities defined in the domain do not reference definitions in other files (except the common types under type, which are emitted into the file by default).
* 2. We assume other project files do not reference anything in the domain except the common types under type; otherwise it will not be emitted into the file.
* The handling of imports here is fairly rough; refine it later when the need arises.
*/
var moduleSpecifier = node.moduleSpecifier, importClause = node.importClause;
if (ts.isStringLiteral(moduleSpecifier) && !moduleSpecifier.text.startsWith(env_1.LIB_OAK_DOMAIN)) {
console.log("".concat(filename, "\u4E2D\u53D1\u73B0\u4E86\u9700\u8981\u5BFC\u51FA\u5230Schema\u6587\u4EF6\u4E2D\u7684import"));
}
}
}
if (ts.isInterfaceDeclaration(node)) {
// schema definition
@@ -3085,7 +3095,7 @@ function analyzeInModi() {
}
}
}
function analyzeEntities(inputDir) {
function analyzeEntities(inputDir, relativePath) {
var files = (0, fs_1.readdirSync)(inputDir);
var fullFilenames = files.map(function (ele) {
var entity = ele.slice(0, ele.indexOf('.'));
@@ -3096,7 +3106,7 @@ function analyzeEntities(inputDir) {
});
var program = ts.createProgram(fullFilenames, { allowJs: true });
files.forEach(function (filename) {
analyzeEntity(filename, inputDir, program);
analyzeEntity(filename, inputDir, program, relativePath);
});
analyzeInModi();
}
@@ -1,4 +1,4 @@
import path from 'path';
import PathLib from 'path';
import assert from 'assert';
import { execSync } from 'child_process';
import { writeFileSync, readdirSync, mkdirSync, fstat } from 'fs';
@@ -16,6 +16,7 @@ import {
NUMERICAL_LITERL_DEFAULT_PRECISION,
NUMERICAL_LITERL_DEFAULT_SCALE,
INT_LITERL_DEFAULT_WIDTH,
LIB_OAK_DOMAIN,
} from './env';
import { firstLetterLowerCase, firstLetterUpperCase } from '../utils/string';

@@ -32,6 +33,7 @@ const Schema: Record<string, {
inModi: boolean;
hasRelationDef: boolean;
enumStringAttrs: string[],
additionalImports: ts.ImportDeclaration[],
}> = {};
const OneToMany: Record<string, Array<[string, string, boolean]>> = {};
const ManyToOne: Record<string, Array<[string, string, boolean]>> = {};
@@ -252,7 +254,7 @@ function addActionSource(moduleName: string, name: ts.Identifier, node: ts.Impor
const ast = ActionAsts[moduleName];
const { moduleSpecifier } = node;

// Currently this should only reference the common actions in oak-domain/src/actions/action
// todo: currently this should only reference the common actions in oak-domain/src/actions/action; if cross-references appear in the future this code must be fixed (it would also break if the domain itself referenced action_constants)
assert(ts.isStringLiteral(moduleSpecifier) && moduleSpecifier.text === ACTION_CONSTANT_IN_OAK_DOMAIN());
assign(ast.importedFrom, {
[name.text]: node,
@@ -408,31 +410,30 @@ function dealWithActions(moduleName: string, filename: string, node: ts.TypeNode
);
}

function getEntityImported(declaration: ts.ImportDeclaration, filename: string) {
/**
* An entity reference must be written as import { Schema as XXX } from '..../XXX'
* @param declaration
* @param filename
* @returns
*/
function getEntityImported(declaration: ts.ImportDeclaration) {
const { moduleSpecifier, importClause } = declaration;
let entityImported: string | undefined;
if (ts.isStringLiteral(moduleSpecifier)) {
if (moduleSpecifier.text.startsWith('./')) {
entityImported = moduleSpecifier.text.slice(2);
}
else if (moduleSpecifier.text.startsWith(ENTITY_PATH_IN_OAK_GENERAL_BUSINESS())) {
entityImported = moduleSpecifier.text.slice(ENTITY_PATH_IN_OAK_GENERAL_BUSINESS().length);
}
else if (moduleSpecifier.text.startsWith(ENTITY_PATH_IN_OAK_DOMAIN())) {
entityImported = moduleSpecifier.text.slice(ENTITY_PATH_IN_OAK_DOMAIN().length)
}
}

if (entityImported) {
if (ts.isStringLiteral(moduleSpecifier)) {
const { name: importedFileName } = PathLib.parse(moduleSpecifier.text);
const { namedBindings } = importClause!;
assert(ts.isNamedImports(namedBindings!));
if (ts.isNamedImports(namedBindings!)) {
const { elements } = namedBindings!;
assert(elements.find(
ele => ts.isImportSpecifier(ele) && ele.name.text === entityImported && ele.propertyName!.text === 'Schema'
), `「${filename}」导入的对象名称和对象所在的文件名称「${entityImported}」不符`);
return entityImported;
if (elements.find(
ele => ts.isImportSpecifier(ele) && ele.name.text === importedFileName && ele.propertyName!.text === 'Schema'
)) {
entityImported = importedFileName;
}
}
}
return entityImported;
}

function checkLocaleEnumAttrs(node: ts.TypeLiteralNode, attrs: string[], filename: string) {
const { members } = node;
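Note: to illustrate the convention the new doc comment describes, an entity reference is only recognized when the local alias matches the entity file's name; the entity name below is a placeholder, not taken from this commit.

// Recognized by getEntityImported: alias "User" matches the file name 'User'.
import { Schema as User } from './User';
// The same check is applied to specifiers under ENTITY_PATH_IN_OAK_DOMAIN() and
// ENTITY_PATH_IN_OAK_GENERAL_BUSINESS(); after this commit an alias/file-name
// mismatch no longer trips an assertion in this function.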
@@ -471,7 +472,7 @@ function checkLocaleExpressionPropertyExists(root: ts.ObjectLiteralExpression, a
)
}

function analyzeEntity(filename: string, path: string, program: ts.Program) {
function analyzeEntity(filename: string, path: string, program: ts.Program, relativePath?: string) {
const fullPath = `${path}/${filename}`;
const sourceFile = program.getSourceFile(fullPath);
const moduleName = filename.split('.')[0];
@@ -495,13 +496,35 @@ function analyzeEntity(filename: string, path: string, program: ts.Program) {
const enumStringAttrs: string[] = [];
const states: string[] = [];
const localEnumStringTypes: string[] = [];
const additionalImports: ts.ImportDeclaration[] = [];
let localeDef: ts.ObjectLiteralExpression | undefined = undefined;
ts.forEachChild(sourceFile!, (node) => {
if (ts.isImportDeclaration(node)) {
const entityImported = getEntityImported(node, filename);
const entityImported = getEntityImported(node);
if (entityImported) {
referencedSchemas.push(entityImported);
}
else if (!process.env.COMPLING_IN_DOMAIN && !relativePath?.startsWith(LIB_OAK_DOMAIN)) {
/** Definition types imported from outside the domain must be copied into the generated Schema file.
* Note: 1. We assume the entities defined in the domain do not reference definitions in other files (except the common types under type, which are emitted into the file by default).
* 2. We assume other project files do not reference anything in the domain except the common types under type; otherwise it will not be emitted into the file.
* The handling of imports here is fairly rough; refine it later when the need arises.
*/
const { moduleSpecifier, importClause } = node;
if (ts.isStringLiteral(moduleSpecifier) && !moduleSpecifier.text.startsWith(LIB_OAK_DOMAIN)) {
const moduleSpecifier2Text = relativePath ? PathLib.join(relativePath, '..', moduleSpecifier.text) : PathLib.join('..', moduleSpecifier.text);
additionalImports.push(
factory.updateImportDeclaration(
node,
undefined,
undefined,
importClause,
factory.createStringLiteral(moduleSpecifier2Text),
undefined
)
);
}
}
}

if (ts.isInterfaceDeclaration(node)) {
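Note: a rough illustration of the specifier rewriting above; the import and the relativePath value are placeholders, not from this commit, and posix separators are assumed. The rewritten declarations are collected in additionalImports and later emitted into the generated <entity>/Schema.ts (see the outputSchema hunk further down).

import PathLib from 'path';

// Hypothetical non-domain import found inside an entity definition file:
const originalSpecifier = '../types/Demo';

// Without relativePath, the copy emitted into the generated Schema file uses:
console.log(PathLib.join('..', originalSpecifier));                   // '../../types/Demo'

// With a relativePath (placeholder value), it is prefixed first:
console.log(PathLib.join('oak-app-domain', '..', originalSpecifier)); // '../types/Demo'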
@@ -1098,6 +1121,7 @@ function analyzeEntity(filename: string, path: string, program: ts.Program) {
static: _static,
hasRelationDef,
enumStringAttrs: enumStringAttrs.concat(states),
additionalImports,
};
if (hasFulltextIndex) {
assign(schema, {
@@ -4514,7 +4538,7 @@ function outputSubQuery(outputDir: string, printer: ts.Printer) {
resultFile
);

const fileName = path.join(outputDir, '_SubQuery.ts');
const fileName = PathLib.join(outputDir, '_SubQuery.ts');
writeFileSync(fileName, result, { flag: 'w' });
}

@@ -4622,7 +4646,7 @@ function outputEntityDict(outputDir: string, printer: ts.Printer) {
resultFile
);

const fileName = path.join(outputDir, 'EntityDict.ts');
const fileName = PathLib.join(outputDir, 'EntityDict.ts');
writeFileSync(fileName, result, { flag: 'w' });
}

@@ -4746,6 +4770,10 @@ function outputSchema(outputDir: string, printer: ts.Printer) {
)
);
}
const { additionalImports } = Schema[entity];
if (additionalImports?.length > 0) {
statements.push(...additionalImports);
}

constructSchema(statements, entity);
constructFilter(statements, entity);
@@ -4894,7 +4922,7 @@ function outputSchema(outputDir: string, printer: ts.Printer) {
)

const result = printer.printList(ts.ListFormat.SourceFileStatements, factory.createNodeArray(statements), Schema[entity].sourceFile);
const fileName = path.join(outputDir, entity, 'Schema.ts');
const fileName = PathLib.join(outputDir, entity, 'Schema.ts');
writeFileSync(fileName, result, { flag: 'w' });
}
}
@@ -4951,7 +4979,7 @@ function outputAction(outputDir: string, printer: ts.Printer) {
ts.ListFormat.SourceFileStatements,
factory.createNodeArray(importStatements.concat(statements)),
sourceFile);
const filename = path.join(outputDir, entity, 'Action.ts');
const filename = PathLib.join(outputDir, entity, 'Action.ts');
writeFileSync(filename, result, { flag: 'w' });

actionDictStatements.push(
@@ -5004,7 +5032,7 @@ function outputAction(outputDir: string, printer: ts.Printer) {
resultFile
);

const fileName = path.join(outputDir, 'ActionDefDict.ts');
const fileName = PathLib.join(outputDir, 'ActionDefDict.ts');
writeFileSync(fileName, result, { flag: 'w' });
}

@@ -5382,7 +5410,7 @@ function outputLocale(outputDir: string, printer: ts.Printer) {
]),
sourceFile);
const data = Function(result)();
const filename = path.join(outputDir, entity, 'locales', `${lng}.json`);
const filename = PathLib.join(outputDir, entity, 'locales', `${lng}.json`);
writeFileSync(filename, JSON.stringify(data), { flag: 'w' });

if (locales[lng]) {
@@ -5714,7 +5742,7 @@ function outputStorage(outputDir: string, printer: ts.Printer) {
ts.ListFormat.SourceFileStatements,
factory.createNodeArray(statements),
sourceFile);
const filename = path.join(outputDir, entity, 'Storage.ts');
const filename = PathLib.join(outputDir, entity, 'Storage.ts');
writeFileSync(filename, result, { flag: 'w' });

importStatements.push(
@@ -5771,7 +5799,7 @@ function outputStorage(outputDir: string, printer: ts.Printer) {
ts.ListFormat.SourceFileStatements,
factory.createNodeArray(importStatements),
ts.createSourceFile("someFileName.ts", "", ts.ScriptTarget.Latest, /*setParentNodes*/ false, ts.ScriptKind.TS));
const filename = path.join(outputDir, 'Storage.ts');
const filename = PathLib.join(outputDir, 'Storage.ts');
writeFileSync(filename, result, { flag: 'w' });
}

@@ -5779,10 +5807,10 @@ function resetOutputDir(outputDir: string) {
emptydirSync(outputDir);

for (const moduleName in Schema) {
mkdirSync(path.join(outputDir, moduleName));
mkdirSync(path.join(outputDir, moduleName, 'locales'));
mkdirSync(PathLib.join(outputDir, moduleName));
mkdirSync(PathLib.join(outputDir, moduleName, 'locales'));
}
mkdirSync(path.join(outputDir, '_locales'))
mkdirSync(PathLib.join(outputDir, '_locales'))
}

function addReverseRelationship() {
@@ -5801,7 +5829,7 @@ function outputIndexTs(outputDir: string) {
export * from './Storage';
export * from './ActionDefDict';
`;
const filename = path.join(outputDir, 'index.ts');
const filename = PathLib.join(outputDir, 'index.ts');
writeFileSync(filename, indexTs, { flag: 'w' });
}

@@ -5811,7 +5839,7 @@ function outputPackageJson(outputDir: string) {
"main": "index.ts"
};

const filename = path.join(outputDir, 'package.json');
const filename = PathLib.join(outputDir, 'package.json');
writeFileSync(filename, JSON.stringify(pj), { flag: 'w' });
}

@@ -5863,7 +5891,7 @@ function analyzeInModi() {
}
}

export function analyzeEntities(inputDir: string) {
export function analyzeEntities(inputDir: string, relativePath?: string) {
const files = readdirSync(inputDir);
const fullFilenames = files.map(
ele => {
@@ -5881,7 +5909,7 @@ export function analyzeEntities(inputDir: string) {

files.forEach(
(filename) => {
analyzeEntity(filename, inputDir, program);
analyzeEntity(filename, inputDir, program, relativePath);
}
);
analyzeInModi();