Merge branch 'dev' of gitea.51mars.com:Oak-Team/oak-db into dev
commit
cb8b0428b4
|
|
@ -1,4 +1,4 @@
|
|||
import { EntityDict, OperateOption, OperationResult, TxnOption, StorageSchema, SelectOption, AggregationResult, Attribute, Index } from 'oak-domain/lib/types';
|
||||
import { EntityDict, OperateOption, OperationResult, TxnOption, StorageSchema, SelectOption, AggregationResult } from 'oak-domain/lib/types';
|
||||
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
|
||||
import { CascadeStore } from 'oak-domain/lib/store/CascadeStore';
|
||||
import { MySQLConfiguration } from './types/Configuration';
|
||||
|
|
@ -7,7 +7,7 @@ import { MySqlTranslator, MySqlSelectOption, MysqlOperateOption } from './transl
|
|||
import { AsyncContext } from 'oak-domain/lib/store/AsyncRowStore';
|
||||
import { SyncContext } from 'oak-domain/lib/store/SyncRowStore';
|
||||
import { CreateEntityOption } from '../types/Translator';
|
||||
import { DbStore } from '../types/dbStore';
|
||||
import { DbStore, Plan } from '../types/dbStore';
|
||||
export declare class MysqlStore<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>> extends CascadeStore<ED> implements DbStore<ED, Cxt> {
|
||||
protected countAbjointRow<T extends keyof ED, OP extends SelectOption, Cxt extends SyncContext<ED>>(entity: T, selection: Pick<ED[T]['Selection'], 'filter' | 'count'>, context: Cxt, option: OP): number;
|
||||
protected aggregateAbjointRowSync<T extends keyof ED, OP extends SelectOption, Cxt extends SyncContext<ED>>(entity: T, aggregation: ED[T]['Aggregation'], context: Cxt, option: OP): AggregationResult<ED[T]['Schema']>;
|
||||
|
|
@ -48,16 +48,3 @@ export declare class MysqlStore<ED extends EntityDict & BaseEntityDict, Cxt exte
|
|||
*/
|
||||
diffSchema(schemaOld: StorageSchema<any>, schemaNew: StorageSchema<any>): Plan;
|
||||
}
|
||||
type Plan = {
|
||||
newTables: Record<string, {
|
||||
attributes: Record<string, Attribute>;
|
||||
}>;
|
||||
newIndexes: Record<string, Index<any>[]>;
|
||||
updatedTables: Record<string, {
|
||||
attributes: Record<string, Attribute & {
|
||||
isNew: boolean;
|
||||
}>;
|
||||
}>;
|
||||
updatedIndexes: Record<string, Index<any>[]>;
|
||||
};
|
||||
export {};
|
||||
|
|
|
|||
|
|
@ -81,6 +81,16 @@ class MySqlTranslator extends sqlTranslator_1.SqlTranslator {
|
|||
makeUpSchema() {
|
||||
for (const entity in this.schema) {
|
||||
const { attributes, indexes } = this.schema[entity];
|
||||
// Automatically append $$deleteAt$$ to non-special indexes (by qcqcqc)
|
||||
for (const index of indexes || []) {
|
||||
if (index.config?.type) {
|
||||
continue;
|
||||
}
|
||||
const indexAttrNames = index.attributes.map(attr => attr.name);
|
||||
if (!indexAttrNames.includes('$$deleteAt$$')) {
|
||||
index.attributes.push({ name: '$$deleteAt$$' });
|
||||
}
|
||||
}
|
||||
const geoIndexes = [];
|
||||
for (const attr in attributes) {
|
||||
if (attributes[attr].type === 'geometry') {
|
||||
|
|
@ -872,8 +882,10 @@ class MySqlTranslator extends sqlTranslator_1.SqlTranslator {
|
|||
const refId = (expr)['#refId'];
|
||||
const refAttr = (expr)['#refAttr'];
|
||||
(0, assert_1.default)(refDict[refId]);
|
||||
const attrText = `\`${refDict[refId][0]}\`.\`${refAttr}\``;
|
||||
result = this.translateAttrInExpression(entity, (expr)['#refAttr'], attrText);
|
||||
const [refAlias, refEntity] = refDict[refId];
|
||||
const attrText = `\`${refAlias}\`.\`${refAttr}\``;
|
||||
// refEntity must be used here; otherwise the entity will not match for expressions in deeply nested filter nodes
|
||||
result = this.translateAttrInExpression(refEntity, (expr)['#refAttr'], attrText);
|
||||
}
|
||||
else {
|
||||
(0, assert_1.default)(k.length === 1);
|
||||
|
|
|
|||
|
|
@ -16,10 +16,6 @@ export declare class PostgreSQLConnector {
|
|||
exec(sql: string, txn?: string): Promise<[QueryResultRow[], QueryResult]>;
|
||||
commitTransaction(txn: string): Promise<void>;
|
||||
rollbackTransaction(txn: string): Promise<void>;
|
||||
/**
|
||||
* Execute multiple SQL statements (for scenarios such as initialization)
|
||||
*/
|
||||
execBatch(sqls: string[], txn?: string): Promise<void>;
|
||||
/**
|
||||
* Get the connection pool status
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -133,16 +133,6 @@ class PostgreSQLConnector {
|
|||
connection.release();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Execute multiple SQL statements (for scenarios such as initialization)
|
||||
*/
|
||||
async execBatch(sqls, txn) {
|
||||
for (const sql of sqls) {
|
||||
if (sql.trim()) {
|
||||
await this.exec(sql, txn);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get the connection pool status
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import { PostgreSQLTranslator, PostgreSQLSelectOption, PostgreSQLOperateOption }
|
|||
import { AsyncContext } from 'oak-domain/lib/store/AsyncRowStore';
|
||||
import { SyncContext } from 'oak-domain/lib/store/SyncRowStore';
|
||||
import { CreateEntityOption } from '../types/Translator';
|
||||
import { DbStore } from '../types/dbStore';
|
||||
import { DbStore, Plan } from '../types/dbStore';
|
||||
export declare class PostgreSQLStore<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>> extends CascadeStore<ED> implements DbStore<ED, Cxt> {
|
||||
protected countAbjointRow<T extends keyof ED, OP extends SelectOption, Cxt extends SyncContext<ED>>(entity: T, selection: Pick<ED[T]['Selection'], 'filter' | 'count'>, context: Cxt, option: OP): number;
|
||||
protected aggregateAbjointRowSync<T extends keyof ED, OP extends SelectOption, Cxt extends SyncContext<ED>>(entity: T, aggregation: ED[T]['Aggregation'], context: Cxt, option: OP): AggregationResult<ED[T]['Schema']>;
|
||||
|
|
@ -35,4 +35,16 @@ export declare class PostgreSQLStore<ED extends EntityDict & BaseEntityDict, Cxt
|
|||
connect(): Promise<void>;
|
||||
disconnect(): Promise<void>;
|
||||
initialize(option: CreateEntityOption): Promise<void>;
|
||||
readSchema(): Promise<StorageSchema<ED>>;
|
||||
/**
|
||||
* Decide how to upgrade based on the loaded dataSchema and the schema currently in the database.
|
||||
* The resulting plan has two phases, an additive phase and a reductive phase; between them the user corrects the data.
|
||||
*/
|
||||
makeUpgradePlan(): Promise<Plan>;
|
||||
/**
|
||||
* Compare two schemas; what is computed here is the increment of new relative to old.
|
||||
* @param schemaOld
|
||||
* @param schemaNew
|
||||
*/
|
||||
diffSchema(schemaOld: StorageSchema<any>, schemaNew: StorageSchema<any>): Plan;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,6 +8,12 @@ const translator_1 = require("./translator");
|
|||
const lodash_1 = require("lodash");
|
||||
const assert_1 = tslib_1.__importDefault(require("assert"));
|
||||
const relation_1 = require("oak-domain/lib/store/relation");
|
||||
const ToNumberAttrs = new Set([
|
||||
'$$seq$$',
|
||||
'$$createAt$$',
|
||||
'$$updateAt$$',
|
||||
'$$deleteAt$$',
|
||||
]);
|
||||
function convertGeoTextToObject(geoText) {
|
||||
if (geoText.startsWith('POINT')) {
|
||||
const coord = geoText.match(/(-?\d+\.?\d*)/g);
|
||||
|
|
@ -121,12 +127,19 @@ class PostgreSQLStore extends CascadeStore_1.CascadeStore {
|
|||
const { type } = attributes[attr];
|
||||
switch (type) {
|
||||
case 'date':
|
||||
case 'time': {
|
||||
case 'time':
|
||||
case 'datetime': {
|
||||
if (value instanceof Date) {
|
||||
r[attr] = value.valueOf();
|
||||
}
|
||||
else {
|
||||
r[attr] = value;
|
||||
if (typeof value === 'string') {
|
||||
r[attr] = parseInt(value, 10);
|
||||
}
|
||||
else {
|
||||
(0, assert_1.default)(typeof value === 'number' || value === null);
|
||||
r[attr] = value;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
@ -203,6 +216,14 @@ class PostgreSQLStore extends CascadeStore_1.CascadeStore {
|
|||
// PostgreSQL returns count as a string
|
||||
r[attr] = parseInt(value, 10);
|
||||
}
|
||||
else if (attr.startsWith("#sum") || attr.startsWith("#avg") || attr.startsWith("#min") || attr.startsWith("#max")) {
|
||||
// PostgreSQL returns sum/avg/min/max as strings
|
||||
r[attr] = parseFloat(value);
|
||||
}
|
||||
else if (ToNumberAttrs.has(attr)) {
|
||||
// System attributes ($$seq$$ / $$createAt$$ / $$updateAt$$ / $$deleteAt$$) are returned as bigint strings
|
||||
r[attr] = parseInt(value, 10);
|
||||
}
|
||||
else {
|
||||
r[attr] = value;
|
||||
}
|
||||
|
|
@ -315,15 +336,192 @@ class PostgreSQLStore extends CascadeStore_1.CascadeStore {
|
|||
await this.connector.disconnect();
|
||||
}
|
||||
async initialize(option) {
|
||||
const schema = this.getSchema();
|
||||
// Optional: create the PostGIS extension first
|
||||
// await this.connector.exec('CREATE EXTENSION IF NOT EXISTS postgis;');
|
||||
for (const entity in schema) {
|
||||
const sqls = this.translator.translateCreateEntity(entity, option);
|
||||
for (const sql of sqls) {
|
||||
await this.connector.exec(sql);
|
||||
// PostgreSQL DDL is transactional, so wrap all initialization operations in a single transaction
|
||||
const txn = await this.connector.startTransaction({
|
||||
isolationLevel: 'serializable',
|
||||
});
|
||||
try {
|
||||
const schema = this.getSchema();
|
||||
let hasGeoType = false;
|
||||
let hasChineseTsConfig = false;
|
||||
for (const entity in schema) {
|
||||
const { attributes, indexes } = schema[entity];
|
||||
for (const attr in attributes) {
|
||||
const { type } = attributes[attr];
|
||||
if (type === 'geometry') {
|
||||
hasGeoType = true;
|
||||
}
|
||||
}
|
||||
for (const index of indexes || []) {
|
||||
if (index.config?.tsConfig === 'chinese' || index.config?.tsConfig?.includes('chinese')) {
|
||||
hasChineseTsConfig = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (hasGeoType) {
|
||||
console.log('Initializing PostGIS extension for geometry support...');
|
||||
await this.connector.exec('CREATE EXTENSION IF NOT EXISTS postgis;');
|
||||
}
|
||||
if (hasChineseTsConfig) {
|
||||
console.log('Initializing Chinese text search configuration...');
|
||||
const checkChineseConfigSql = `
|
||||
SELECT COUNT(*) as cnt
|
||||
FROM pg_catalog.pg_ts_config
|
||||
WHERE cfgname = 'chinese';
|
||||
`;
|
||||
const result = await this.connector.exec(checkChineseConfigSql);
|
||||
const count = parseInt(result[0][0]?.cnt || '0', 10);
|
||||
if (count === 0) {
|
||||
const createChineseConfigSql = `
|
||||
CREATE EXTENSION IF NOT EXISTS zhparser;
|
||||
CREATE TEXT SEARCH CONFIGURATION chinese (PARSER = zhparser);
|
||||
ALTER TEXT SEARCH CONFIGURATION chinese ADD MAPPING FOR n,v,a,i,e,l WITH simple;
|
||||
`;
|
||||
await this.connector.exec(createChineseConfigSql);
|
||||
}
|
||||
}
|
||||
for (const entity in schema) {
|
||||
const sqls = this.translator.translateCreateEntity(entity, option);
|
||||
for (const sql of sqls) {
|
||||
await this.connector.exec(sql, txn);
|
||||
}
|
||||
}
|
||||
await this.connector.commitTransaction(txn);
|
||||
}
|
||||
catch (error) {
|
||||
await this.connector.rollbackTransaction(txn);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Read the current schema from the database
|
||||
readSchema() {
|
||||
return this.translator.readSchema((sql) => this.connector.exec(sql));
|
||||
}
|
||||
/**
|
||||
* Decide how to upgrade based on the loaded dataSchema and the schema currently in the database.
|
||||
* The resulting plan has two phases, an additive phase and a reductive phase; between them the user corrects the data.
|
||||
*/
|
||||
async makeUpgradePlan() {
|
||||
const originSchema = await this.readSchema();
|
||||
const plan = this.diffSchema(originSchema, this.translator.schema);
|
||||
return plan;
|
||||
}
|
||||
/**
|
||||
* Compare two schemas; what is computed here is the increment of new relative to old.
|
||||
* @param schemaOld
|
||||
* @param schemaNew
|
||||
*/
|
||||
diffSchema(schemaOld, schemaNew) {
|
||||
const plan = {
|
||||
newTables: {},
|
||||
newIndexes: {},
|
||||
updatedIndexes: {},
|
||||
updatedTables: {},
|
||||
};
|
||||
for (const table in schemaNew) {
|
||||
// PostgreSQL table names are case-sensitive (when double-quoted)
|
||||
if (schemaOld[table]) {
|
||||
const { attributes, indexes } = schemaOld[table];
|
||||
const { attributes: attributesNew, indexes: indexesNew } = schemaNew[table];
|
||||
const assignToUpdateTables = (attr, isNew) => {
|
||||
const skipAttrs = ['$$seq$$', '$$createAt$$', '$$updateAt$$', '$$deleteAt$$', 'id'];
|
||||
if (skipAttrs.includes(attr)) {
|
||||
return;
|
||||
}
|
||||
if (!plan.updatedTables[table]) {
|
||||
plan.updatedTables[table] = {
|
||||
attributes: {
|
||||
[attr]: {
|
||||
...attributesNew[attr],
|
||||
isNew,
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
else {
|
||||
plan.updatedTables[table].attributes[attr] = {
|
||||
...attributesNew[attr],
|
||||
isNew,
|
||||
};
|
||||
}
|
||||
};
|
||||
for (const attr in attributesNew) {
|
||||
if (attributes[attr]) {
|
||||
// Check whether the old and new attribute definitions are still equivalent
|
||||
const sql1 = this.translator.translateAttributeDef(attr, attributesNew[attr]);
|
||||
const sql2 = this.translator.translateAttributeDef(attr, attributes[attr]);
|
||||
if (!this.translator.compareSql(sql1, sql2)) {
|
||||
assignToUpdateTables(attr, false);
|
||||
}
|
||||
}
|
||||
else {
|
||||
assignToUpdateTables(attr, true);
|
||||
}
|
||||
}
|
||||
if (indexesNew) {
|
||||
const assignToIndexes = (index, isNew) => {
|
||||
if (isNew) {
|
||||
if (plan.newIndexes[table]) {
|
||||
plan.newIndexes[table].push(index);
|
||||
}
|
||||
else {
|
||||
plan.newIndexes[table] = [index];
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (plan.updatedIndexes[table]) {
|
||||
plan.updatedIndexes[table].push(index);
|
||||
}
|
||||
else {
|
||||
plan.updatedIndexes[table] = [index];
|
||||
}
|
||||
}
|
||||
};
|
||||
const compareConfig = (config1, config2) => {
|
||||
const unique1 = config1?.unique || false;
|
||||
const unique2 = config2?.unique || false;
|
||||
if (unique1 !== unique2) {
|
||||
return false;
|
||||
}
|
||||
const type1 = config1?.type || 'btree';
|
||||
const type2 = config2?.type || 'btree';
|
||||
// tsConfig 比较
|
||||
const tsConfig1 = config1?.tsConfig;
|
||||
const tsConfig2 = config2?.tsConfig;
|
||||
if (JSON.stringify(tsConfig1) !== JSON.stringify(tsConfig2)) {
|
||||
return false;
|
||||
}
|
||||
return type1 === type2;
|
||||
};
|
||||
for (const index of indexesNew) {
|
||||
const { name, config, attributes: indexAttrs } = index;
|
||||
const origin = indexes?.find(ele => ele.name === name);
|
||||
if (origin) {
|
||||
if (JSON.stringify(indexAttrs) !== JSON.stringify(origin.attributes)) {
|
||||
assignToIndexes(index, false);
|
||||
}
|
||||
else {
|
||||
if (!compareConfig(config, origin.config)) {
|
||||
assignToIndexes(index, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
assignToIndexes(index, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
plan.newTables[table] = {
|
||||
attributes: schemaNew[table].attributes,
|
||||
};
|
||||
if (schemaNew[table].indexes) {
|
||||
plan.newIndexes[table] = schemaNew[table].indexes;
|
||||
}
|
||||
}
|
||||
}
|
||||
return plan;
|
||||
}
|
||||
}
|
||||
exports.PostgreSQLStore = PostgreSQLStore;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { EntityDict, Q_FullTextValue, RefOrExpression, Ref, StorageSchema } from "oak-domain/lib/types";
|
||||
import { EntityDict, Q_FullTextValue, RefOrExpression, Ref, StorageSchema, Attribute } from "oak-domain/lib/types";
|
||||
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
|
||||
import { DataType } from "oak-domain/lib/types/schema/DataTypes";
|
||||
import { SqlOperateOption, SqlSelectOption, SqlTranslator } from "../sqlTranslator";
|
||||
|
|
@ -80,4 +80,24 @@ export declare class PostgreSQLTranslator<ED extends EntityDict & BaseEntityDict
|
|||
translateUpsert<T extends keyof ED>(entity: T, data: ED[T]['CreateMulti']['data'], conflictKeys: string[], updateAttrs?: string[]): string;
|
||||
protected populateUpdateStmt(updateText: string, fromText: string, aliasDict: Record<string, string>, filterText: string, sorterText?: string, indexFrom?: number, count?: number, option?: PostgreSQLOperateOption): string;
|
||||
protected populateRemoveStmt(updateText: string, fromText: string, aliasDict: Record<string, string>, filterText: string, sorterText?: string, indexFrom?: number, count?: number, option?: PostgreSQLOperateOption): string;
|
||||
/**
|
||||
* Translate a type returned by PostgreSQL back into an oak type; the inverse of populateDataTypeDef
|
||||
* @param type PostgreSQL type string
|
||||
*/
|
||||
private reTranslateToAttribute;
|
||||
/**
|
||||
* Read the current schema structure from the PostgreSQL database
|
||||
*/
|
||||
readSchema(execFn: (sql: string) => Promise<any>): Promise<StorageSchema<ED>>;
|
||||
/**
|
||||
* Translate an attribute definition into a PostgreSQL DDL clause
|
||||
* @param attr attribute name
|
||||
* @param attrDef attribute definition
|
||||
*/
|
||||
translateAttributeDef(attr: string, attrDef: Attribute): string;
|
||||
/**
|
||||
* Compare whether two SQL statements are equivalent (used for schema diff)
|
||||
* Ignore formatting differences such as whitespace and case
|
||||
*/
|
||||
compareSql(sql1: string, sql2: string): boolean;
|
||||
}
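A minimal sketch of how these new translator declarations fit together in a schema diff; the attribute values and the exact DDL text are assumptions for illustration, not taken from this commit:
    // Hypothetical attribute definitions; translateAttributeDef renders each as a DDL clause.
    const translator = new PostgreSQLTranslator(storageSchema);
    const oldClause = translator.translateAttributeDef('nickname', { type: 'varchar', params: { length: 64 } });
    const newClause = translator.translateAttributeDef('nickname', { type: 'varchar', params: { length: 128 } });
    // compareSql ignores whitespace and case, so only a real definition change reports false
    // and sends the attribute into plan.updatedTables during diffSchema.
    if (!translator.compareSql(oldClause, newClause)) {
        // schedule an ALTER for "nickname"
    }
    // readSchema goes the other way: it queries information_schema/pg_catalog and uses
    // reTranslateToAttribute to map e.g. 'character varying(128)' back to { type: 'varchar', params: { length: 128 } }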
|
||||
|
|
|
|||
|
|
@ -171,6 +171,16 @@ class PostgreSQLTranslator extends sqlTranslator_1.SqlTranslator {
|
|||
makeUpSchema() {
|
||||
for (const entity in this.schema) {
|
||||
const { attributes, indexes } = this.schema[entity];
|
||||
// Automatically append $$deleteAt$$ to non-special indexes
|
||||
for (const index of indexes || []) {
|
||||
if (index.config?.type) {
|
||||
continue;
|
||||
}
|
||||
const indexAttrNames = index.attributes.map(attr => attr.name);
|
||||
if (!indexAttrNames.includes('$$deleteAt$$')) {
|
||||
index.attributes.push({ name: '$$deleteAt$$' });
|
||||
}
|
||||
}
|
||||
const geoIndexes = [];
|
||||
for (const attr in attributes) {
|
||||
if (attributes[attr].type === 'geometry') {
|
||||
|
|
@ -741,6 +751,7 @@ class PostgreSQLTranslator extends sqlTranslator_1.SqlTranslator {
|
|||
else if (typeof value === 'number') {
|
||||
return `${value}`;
|
||||
}
|
||||
(0, assert_1.default)(typeof value === 'string', 'Invalid date/time value');
|
||||
return `'${(new Date(value)).valueOf()}'`;
|
||||
}
|
||||
case 'object':
|
||||
|
|
@ -967,12 +978,8 @@ class PostgreSQLTranslator extends sqlTranslator_1.SqlTranslator {
|
|||
}
|
||||
indexSql += '(';
|
||||
const indexColumns = [];
|
||||
let includeDeleteAt = false;
|
||||
for (const indexAttr of indexAttrs) {
|
||||
const { name: attrName, direction } = indexAttr;
|
||||
if (attrName === '$$deleteAt$$') {
|
||||
includeDeleteAt = true;
|
||||
}
|
||||
if (indexType === 'fulltext') {
|
||||
// Full-text index: use to_tsvector
|
||||
indexColumns.push(`to_tsvector('${tsLang}', COALESCE("${attrName}", ''))`);
|
||||
|
|
@ -986,10 +993,6 @@ class PostgreSQLTranslator extends sqlTranslator_1.SqlTranslator {
|
|||
indexColumns.push(col);
|
||||
}
|
||||
}
|
||||
// Non-special indexes automatically include deleteAt
|
||||
if (!includeDeleteAt && !indexType) {
|
||||
indexColumns.push('"$$deleteAt$$"');
|
||||
}
|
||||
indexSql += indexColumns.join(', ');
|
||||
indexSql += ');';
|
||||
sqls.push(indexSql);
|
||||
|
|
@ -1766,5 +1769,371 @@ class PostgreSQLTranslator extends sqlTranslator_1.SqlTranslator {
|
|||
// This method should no longer be called directly, because translateRemove has been overridden
|
||||
throw new Error('populateRemoveStmt should not be called directly in PostgreSQL. Use translateRemove instead.');
|
||||
}
|
||||
/**
|
||||
* Translate a type returned by PostgreSQL back into an oak type; the inverse of populateDataTypeDef
|
||||
* @param type PostgreSQL type string
|
||||
*/
|
||||
reTranslateToAttribute(type) {
|
||||
// Handle types with a length: character varying(255), character(10)
|
||||
const varcharMatch = /^character varying\((\d+)\)$/.exec(type);
|
||||
if (varcharMatch) {
|
||||
return {
|
||||
type: 'varchar',
|
||||
params: {
|
||||
length: parseInt(varcharMatch[1], 10),
|
||||
}
|
||||
};
|
||||
}
|
||||
const charMatch = /^character\((\d+)\)$/.exec(type);
|
||||
if (charMatch) {
|
||||
return {
|
||||
type: 'char',
|
||||
params: {
|
||||
length: parseInt(charMatch[1], 10),
|
||||
}
|
||||
};
|
||||
}
|
||||
// Handle types with precision and scale: numeric(10,2)
|
||||
const numericWithScaleMatch = /^numeric\((\d+),(\d+)\)$/.exec(type);
|
||||
if (numericWithScaleMatch) {
|
||||
return {
|
||||
type: 'decimal',
|
||||
params: {
|
||||
precision: parseInt(numericWithScaleMatch[1], 10),
|
||||
scale: parseInt(numericWithScaleMatch[2], 10),
|
||||
},
|
||||
};
|
||||
}
|
||||
// Handle types with precision only: numeric(10), timestamp(6)
|
||||
const numericMatch = /^numeric\((\d+)\)$/.exec(type);
|
||||
if (numericMatch) {
|
||||
return {
|
||||
type: 'decimal',
|
||||
params: {
|
||||
precision: parseInt(numericMatch[1], 10),
|
||||
scale: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
const timestampMatch = /^timestamp\((\d+)\) without time zone$/.exec(type);
|
||||
if (timestampMatch) {
|
||||
return {
|
||||
type: 'timestamp',
|
||||
params: {
|
||||
precision: parseInt(timestampMatch[1], 10),
|
||||
},
|
||||
};
|
||||
}
|
||||
const timeMatch = /^time\((\d+)\) without time zone$/.exec(type);
|
||||
if (timeMatch) {
|
||||
return {
|
||||
type: 'time',
|
||||
params: {
|
||||
precision: parseInt(timeMatch[1], 10),
|
||||
},
|
||||
};
|
||||
}
|
||||
// Map PostgreSQL types to oak types
|
||||
const typeMap = {
|
||||
'bigint': 'bigint',
|
||||
'integer': 'integer',
|
||||
'smallint': 'smallint',
|
||||
'real': 'real',
|
||||
'double precision': 'double precision',
|
||||
'boolean': 'boolean',
|
||||
'text': 'text',
|
||||
'jsonb': 'object', // the framework stores object/array as jsonb
|
||||
'json': 'object',
|
||||
'bytea': 'bytea',
|
||||
'character varying': 'varchar',
|
||||
'character': 'char',
|
||||
'timestamp without time zone': 'timestamp',
|
||||
'time without time zone': 'time',
|
||||
'date': 'date',
|
||||
'uuid': 'uuid',
|
||||
'geometry': 'geometry',
|
||||
};
|
||||
const mappedType = typeMap[type];
|
||||
if (mappedType) {
|
||||
return { type: mappedType };
|
||||
}
|
||||
// If this is a user-defined enum type, return enum (the concrete values need an extra query)
|
||||
// Return the base type for now; enum values are handled separately in readSchema
|
||||
return { type: type };
|
||||
}
|
||||
/**
|
||||
* Read the current schema structure from the PostgreSQL database
|
||||
*/
|
||||
async readSchema(execFn) {
|
||||
const result = {};
|
||||
// 1. Fetch all tables
|
||||
const tablesSql = `
|
||||
SELECT tablename
|
||||
FROM pg_tables
|
||||
WHERE schemaname = 'public'
|
||||
ORDER BY tablename;
|
||||
`;
|
||||
const [tablesResult] = await execFn(tablesSql);
|
||||
for (const tableRow of tablesResult) {
|
||||
const tableName = tableRow.tablename;
|
||||
// 2. Fetch the table's column information
|
||||
const columnsSql = `
|
||||
SELECT
|
||||
column_name,
|
||||
data_type,
|
||||
character_maximum_length,
|
||||
numeric_precision,
|
||||
numeric_scale,
|
||||
is_nullable,
|
||||
column_default,
|
||||
udt_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = '${tableName}'
|
||||
ORDER BY ordinal_position;
|
||||
`;
|
||||
const [columnsResult] = await execFn(columnsSql);
|
||||
const attributes = {};
|
||||
for (const col of columnsResult) {
|
||||
const { column_name: colName, data_type: dataType, character_maximum_length: maxLength, numeric_precision: precision, numeric_scale: scale, is_nullable: isNullable, column_default: defaultValue, udt_name: udtName, } = col;
|
||||
let attr;
|
||||
// Handle user-defined types (enums)
|
||||
if (dataType === 'USER-DEFINED') {
|
||||
// Query the enum values
|
||||
const enumSql = `
|
||||
SELECT e.enumlabel
|
||||
FROM pg_type t
|
||||
JOIN pg_enum e ON t.oid = e.enumtypid
|
||||
WHERE t.typname = '${udtName}'
|
||||
ORDER BY e.enumsortorder;
|
||||
`;
|
||||
const [enumResult] = await execFn(enumSql);
|
||||
const enumeration = enumResult.map((r) => r.enumlabel);
|
||||
attr = {
|
||||
type: 'enum',
|
||||
enumeration,
|
||||
};
|
||||
}
|
||||
else {
|
||||
// Build the full type string
|
||||
let fullType = dataType;
|
||||
if (maxLength) {
|
||||
fullType = `${dataType}(${maxLength})`;
|
||||
}
|
||||
else if (precision !== null && scale !== null) {
|
||||
fullType = `${dataType}(${precision},${scale})`;
|
||||
}
|
||||
else if (precision !== null) {
|
||||
fullType = `${dataType}(${precision})`;
|
||||
}
|
||||
attr = this.reTranslateToAttribute(fullType);
|
||||
}
|
||||
// Handle constraints
|
||||
attr.notNull = isNullable === 'NO';
|
||||
// Handle default values (simplified; complex defaults may need more refined parsing)
|
||||
if (defaultValue && !defaultValue.includes('nextval')) {
|
||||
// Skip sequence defaults
|
||||
// Simple handling: strip type casts
|
||||
let cleanDefault = defaultValue.replace(/::[a-z]+/gi, '').replace(/'/g, '');
|
||||
if (cleanDefault === 'true') {
|
||||
attr.default = true;
|
||||
}
|
||||
else if (cleanDefault === 'false') {
|
||||
attr.default = false;
|
||||
}
|
||||
else if (!isNaN(Number(cleanDefault))) {
|
||||
attr.default = Number(cleanDefault);
|
||||
}
|
||||
else {
|
||||
attr.default = cleanDefault;
|
||||
}
|
||||
}
|
||||
// Check whether this is a sequence (IDENTITY) column
|
||||
if (colName === '$$seq$$' || (defaultValue && defaultValue.includes('nextval'))) {
|
||||
attr.sequenceStart = 10000; // default start value
|
||||
}
|
||||
attributes[colName] = attr;
|
||||
}
|
||||
// 3. Fetch index information
|
||||
const indexesSql = `
|
||||
SELECT
|
||||
i.relname as index_name,
|
||||
ix.indisunique as is_unique,
|
||||
am.amname as index_type,
|
||||
pg_get_indexdef(ix.indexrelid) as index_def
|
||||
FROM pg_class t
|
||||
JOIN pg_index ix ON t.oid = ix.indrelid
|
||||
JOIN pg_class i ON i.oid = ix.indexrelid
|
||||
JOIN pg_am am ON i.relam = am.oid
|
||||
WHERE t.relname = '${tableName}'
|
||||
AND t.relkind = 'r'
|
||||
AND i.relname NOT LIKE '%_pkey'
|
||||
AND NOT ix.indisprimary
|
||||
ORDER BY i.relname;
|
||||
`;
|
||||
const [indexesResult] = await execFn(indexesSql);
|
||||
if (indexesResult.length > 0) {
|
||||
const indexes = [];
|
||||
for (const row of indexesResult) {
|
||||
const { index_name: indexName, is_unique: isUnique, index_type: indexType, index_def: indexDef } = row;
|
||||
// Parse the index definition to obtain column names and configuration
|
||||
const index = {
|
||||
name: indexName,
|
||||
attributes: [],
|
||||
};
|
||||
// Parse the index definition string
|
||||
// Example: CREATE INDEX "user_index_fulltext_chinese" ON public."user" USING gin (to_tsvector('chinese'::regconfig, (COALESCE(name, ''::text) || ' '::text) || COALESCE(nickname, ''::text)))
|
||||
if (indexType === 'gin' && indexDef.includes('to_tsvector')) {
|
||||
// Full-text index
|
||||
index.config = { type: 'fulltext' };
|
||||
// 提取 tsConfig
|
||||
const tsConfigMatch = indexDef.match(/to_tsvector\('([^']+)'/);
|
||||
if (tsConfigMatch) {
|
||||
const tsConfig = tsConfigMatch[1];
|
||||
index.config.tsConfig = tsConfig;
|
||||
}
|
||||
// Extract column names (from the COALESCE expressions)
|
||||
const columnMatches = indexDef.matchAll(/COALESCE\("?([^",\s]+)"?/g);
|
||||
const columns = Array.from(columnMatches, m => m[1]);
|
||||
index.attributes = columns.map(col => ({ name: col }));
|
||||
// Handle multi-language indexes: strip the language suffix
|
||||
// e.g. user_index_fulltext_chinese -> index_fulltext
|
||||
const nameParts = indexName.split('_');
|
||||
if (nameParts.length > 2) {
|
||||
const possibleLang = nameParts[nameParts.length - 1];
|
||||
// If the last segment is a language code, remove it
|
||||
if (['chinese', 'english', 'simple', 'german', 'french', 'spanish', 'russian', 'japanese'].includes(possibleLang)) {
|
||||
index.name = nameParts.slice(0, -1).join('_');
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (indexType === 'gist') {
|
||||
// Spatial index
|
||||
index.config = { type: 'spatial' };
|
||||
// 提取列名
|
||||
const columnMatch = indexDef.match(/\(([^)]+)\)/);
|
||||
if (columnMatch) {
|
||||
const columns = columnMatch[1].split(',').map(c => c.trim().replace(/"/g, ''));
|
||||
index.attributes = columns.map(col => ({ name: col }));
|
||||
}
|
||||
}
|
||||
else if (indexType === 'hash') {
|
||||
// Hash index
|
||||
index.config = { type: 'hash' };
|
||||
// 提取列名
|
||||
const columnMatch = indexDef.match(/\(([^)]+)\)/);
|
||||
if (columnMatch) {
|
||||
const columns = columnMatch[1].split(',').map(c => c.trim().replace(/"/g, ''));
|
||||
index.attributes = columns.map(col => ({ name: col }));
|
||||
}
|
||||
}
|
||||
else {
|
||||
// B-tree index (default)
|
||||
// Extract column names and sort directions
|
||||
const columnMatch = indexDef.match(/\(([^)]+)\)/);
|
||||
if (columnMatch) {
|
||||
const columnDefs = columnMatch[1].split(',');
|
||||
index.attributes = columnDefs.map(colDef => {
|
||||
const trimmed = colDef.trim().replace(/"/g, '');
|
||||
const parts = trimmed.split(/\s+/);
|
||||
const attr = { name: parts[0] };
|
||||
// Check the sort direction
|
||||
if (parts.includes('DESC')) {
|
||||
attr.direction = 'DESC';
|
||||
}
|
||||
else if (parts.includes('ASC')) {
|
||||
attr.direction = 'ASC';
|
||||
}
|
||||
return attr;
|
||||
});
|
||||
}
|
||||
// If it is a unique index
|
||||
if (isUnique) {
|
||||
index.config = { unique: true };
|
||||
}
|
||||
}
|
||||
// Strip the table-name prefix (if present)
|
||||
// e.g. user_index_fulltext -> index_fulltext
|
||||
if (index.name.startsWith(`${tableName}_`)) {
|
||||
index.name = index.name.substring(tableName.length + 1);
|
||||
}
|
||||
indexes.push(index);
|
||||
}
|
||||
Object.assign(result, {
|
||||
[tableName]: {
|
||||
attributes,
|
||||
indexes,
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
Object.assign(result, {
|
||||
[tableName]: {
|
||||
attributes,
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* Translate an attribute definition into a PostgreSQL DDL clause
|
||||
* @param attr attribute name
|
||||
* @param attrDef attribute definition
|
||||
*/
|
||||
translateAttributeDef(attr, attrDef) {
|
||||
let sql = `"${attr}" `;
|
||||
const { type, params, default: defaultValue, unique, notNull, sequenceStart, enumeration, } = attrDef;
|
||||
// Handle sequence (IDENTITY) columns
|
||||
if (typeof sequenceStart === 'number') {
|
||||
sql += `bigint GENERATED BY DEFAULT AS IDENTITY (START WITH ${sequenceStart}) UNIQUE`;
|
||||
return sql;
|
||||
}
|
||||
// Handle enum types
|
||||
if (type === 'enum') {
|
||||
(0, assert_1.default)(enumeration, 'Enum type requires enumeration values');
|
||||
// Note: this is a placeholder; the actual enum type name is determined in translateCreateEntity
|
||||
// For comparison consistency, use the string representation of the enum values
|
||||
sql += `enum(${enumeration.map(v => `'${v}'`).join(',')})`;
|
||||
}
|
||||
else {
|
||||
sql += this.populateDataTypeDef(type, params, enumeration);
|
||||
}
|
||||
// NOT NULL 约束
|
||||
if (notNull || type === 'geometry') {
|
||||
sql += ' NOT NULL';
|
||||
}
|
||||
// UNIQUE 约束
|
||||
if (unique) {
|
||||
sql += ' UNIQUE';
|
||||
}
|
||||
// 默认值
|
||||
if (defaultValue !== undefined && !sequenceStart) {
|
||||
(0, assert_1.default)(type !== 'ref', 'ref type should not have default value');
|
||||
sql += ` DEFAULT ${this.translateAttrValue(type, defaultValue)}`;
|
||||
}
|
||||
// 主键
|
||||
if (attr === 'id') {
|
||||
sql += ' PRIMARY KEY';
|
||||
}
|
||||
return sql;
|
||||
}
|
||||
/**
|
||||
* Compare whether two SQL statements are equivalent (used for schema diff)
|
||||
* Ignore formatting differences such as whitespace and case
|
||||
*/
|
||||
compareSql(sql1, sql2) {
|
||||
// Normalize SQL: collapse extra whitespace and unify case
|
||||
const normalize = (sql) => {
|
||||
return sql
|
||||
.replace(/\s+/g, ' ') // 多个空格合并为一个
|
||||
.replace(/\(\s+/g, '(') // 移除括号后的空格
|
||||
.replace(/\s+\)/g, ')') // 移除括号前的空格
|
||||
.replace(/,\s+/g, ',') // 移除逗号后的空格
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
};
|
||||
return normalize(sql1) === normalize(sql2);
|
||||
}
|
||||
}
|
||||
exports.PostgreSQLTranslator = PostgreSQLTranslator;
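For context, a hypothetical index definition that would exercise the zhparser/'chinese' full-text path shown above (entity and column names are placeholders, not taken from this commit):
    // Hypothetical oak index definition for a "user" entity.
    const fulltextIndex = {
        name: 'index_fulltext',
        attributes: [{ name: 'name' }, { name: 'nickname' }],
        config: { type: 'fulltext', tsConfig: 'chinese' },
    };
    // With tsConfig 'chinese', initialize() creates the zhparser-backed text search
    // configuration on first run, and the index is emitted as USING gin over
    // to_tsvector('chinese', COALESCE(...)) expressions; readSchema later parses the
    // definition back into { type: 'fulltext', tsConfig: 'chinese' }.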
|
||||
|
|
|
|||
|
|
@ -1,8 +1,32 @@
|
|||
import { EntityDict } from "oak-domain/lib/base-app-domain";
|
||||
import { Attribute, EntityDict, Index, OperateOption, OperationResult, StorageSchema, TxnOption } from 'oak-domain/lib/types';
|
||||
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
|
||||
import { AsyncContext, AsyncRowStore } from "oak-domain/lib/store/AsyncRowStore";
|
||||
import { CreateEntityOption } from "./Translator";
|
||||
export interface DbStore<ED extends EntityDict, Cxt extends AsyncContext<ED>> extends AsyncRowStore<ED, Cxt> {
|
||||
import { AggregationResult, SelectOption } from "oak-domain/lib/types";
|
||||
export type Plan = {
|
||||
newTables: Record<string, {
|
||||
attributes: Record<string, Attribute>;
|
||||
}>;
|
||||
newIndexes: Record<string, Index<any>[]>;
|
||||
updatedTables: Record<string, {
|
||||
attributes: Record<string, Attribute & {
|
||||
isNew: boolean;
|
||||
}>;
|
||||
}>;
|
||||
updatedIndexes: Record<string, Index<any>[]>;
|
||||
};
|
||||
export interface DbStore<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>> extends AsyncRowStore<ED, Cxt> {
|
||||
connect: () => Promise<void>;
|
||||
disconnect: () => Promise<void>;
|
||||
initialize(options: CreateEntityOption): Promise<void>;
|
||||
aggregate<T extends keyof ED, OP extends SelectOption>(entity: T, aggregation: ED[T]['Aggregation'], context: Cxt, option: OP): Promise<AggregationResult<ED[T]['Schema']>>;
|
||||
operate<T extends keyof ED>(entity: T, operation: ED[T]['Operation'], context: Cxt, option: OperateOption): Promise<OperationResult<ED>>;
|
||||
select<T extends keyof ED>(entity: T, selection: ED[T]['Selection'], context: Cxt, option: SelectOption): Promise<Partial<ED[T]['Schema']>[]>;
|
||||
count<T extends keyof ED>(entity: T, selection: Pick<ED[T]['Selection'], 'filter' | 'count'>, context: Cxt, option: SelectOption): Promise<number>;
|
||||
begin(option?: TxnOption): Promise<string>;
|
||||
commit(txnId: string): Promise<void>;
|
||||
rollback(txnId: string): Promise<void>;
|
||||
readSchema(): Promise<StorageSchema<ED>>;
|
||||
makeUpgradePlan(): Promise<Plan>;
|
||||
diffSchema(schemaOld: StorageSchema<any>, schemaNew: StorageSchema<any>): Plan;
|
||||
}
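A minimal usage sketch of the upgraded DbStore contract, assuming a concrete implementation such as PostgreSQLStore; the logging and function name are illustrative only:
    // Illustrative only: drive the new schema-upgrade API against any DbStore implementation.
    async function previewUpgrade<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>>(store: DbStore<ED, Cxt>) {
        await store.connect();
        // makeUpgradePlan() = diffSchema(schema read from the database, schema loaded from code)
        const plan = await store.makeUpgradePlan();
        console.log('new tables:', Object.keys(plan.newTables));
        console.log('altered tables:', Object.keys(plan.updatedTables));
        console.log('new/updated indexes:', Object.keys(plan.newIndexes), Object.keys(plan.updatedIndexes));
        await store.disconnect();
    }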
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import { AsyncContext, AsyncRowStore } from 'oak-domain/lib/store/AsyncRowStore'
|
|||
import { SyncContext } from 'oak-domain/lib/store/SyncRowStore';
|
||||
import { CreateEntityOption } from '../types/Translator';
|
||||
import { FieldPacket, ResultSetHeader, RowDataPacket } from 'mysql2';
|
||||
import { DbStore } from '../types/dbStore';
|
||||
import { DbStore, Plan } from '../types/dbStore';
|
||||
|
||||
|
||||
function convertGeoTextToObject(geoText: string): Geo {
|
||||
|
|
@ -466,16 +466,4 @@ export class MysqlStore<ED extends EntityDict & BaseEntityDict, Cxt extends Asyn
|
|||
|
||||
return plan;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
type Plan = {
|
||||
newTables: Record<string, {
|
||||
attributes: Record<string, Attribute>;
|
||||
}>;
|
||||
newIndexes: Record<string, Index<any>[]>;
|
||||
updatedTables: Record<string, {
|
||||
attributes: Record<string, Attribute & { isNew: boolean }>;
|
||||
}>;
|
||||
updatedIndexes: Record<string, Index<any>[]>;
|
||||
};
|
||||
}
|
||||
|
|
@ -109,6 +109,16 @@ export class MySqlTranslator<ED extends EntityDict & BaseEntityDict> extends Sql
|
|||
private makeUpSchema() {
|
||||
for (const entity in this.schema) {
|
||||
const { attributes, indexes } = this.schema[entity];
|
||||
// Automatically append $$deleteAt$$ to non-special indexes (by qcqcqc)
|
||||
for (const index of indexes || []) {
|
||||
if (index.config?.type) {
|
||||
continue;
|
||||
}
|
||||
const indexAttrNames = index.attributes.map(attr => attr.name);
|
||||
if (!indexAttrNames.includes('$$deleteAt$$')) {
|
||||
index.attributes.push({ name: '$$deleteAt$$' });
|
||||
}
|
||||
}
|
||||
const geoIndexes: Index<ED[keyof ED]['OpSchema']>[] = [];
|
||||
for (const attr in attributes) {
|
||||
if (attributes[attr].type === 'geometry') {
|
||||
|
|
@ -971,8 +981,10 @@ export class MySqlTranslator<ED extends EntityDict & BaseEntityDict> extends Sql
|
|||
const refAttr = (expr)['#refAttr'];
|
||||
|
||||
assert(refDict[refId]);
|
||||
const attrText = `\`${refDict[refId][0]}\`.\`${refAttr}\``;
|
||||
result = this.translateAttrInExpression(entity, (expr)['#refAttr'], attrText);
|
||||
const [refAlias, refEntity] = refDict[refId];
|
||||
const attrText = `\`${refAlias}\`.\`${refAttr}\``;
|
||||
// refEntity must be used here; otherwise the entity will not match for expressions in deeply nested filter nodes
|
||||
result = this.translateAttrInExpression(refEntity as T, (expr)['#refAttr'], attrText);
|
||||
}
|
||||
else {
|
||||
assert(k.length === 1);
|
||||
|
|
|
|||
|
|
@ -155,17 +155,6 @@ export class PostgreSQLConnector {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute multiple SQL statements (for scenarios such as initialization)
|
||||
*/
|
||||
async execBatch(sqls: string[], txn?: string): Promise<void> {
|
||||
for (const sql of sqls) {
|
||||
if (sql.trim()) {
|
||||
await this.exec(sql, txn);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the connection pool status
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
import {
|
||||
EntityDict,
|
||||
OperateOption,
|
||||
OperationResult,
|
||||
TxnOption,
|
||||
StorageSchema,
|
||||
SelectOption,
|
||||
AggregationResult,
|
||||
Geo
|
||||
import {
|
||||
EntityDict,
|
||||
OperateOption,
|
||||
OperationResult,
|
||||
TxnOption,
|
||||
StorageSchema,
|
||||
SelectOption,
|
||||
AggregationResult,
|
||||
Geo,
|
||||
IndexConfig,
|
||||
Index,
|
||||
Attribute
|
||||
} from 'oak-domain/lib/types';
|
||||
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
|
||||
import { CascadeStore } from 'oak-domain/lib/store/CascadeStore';
|
||||
|
|
@ -20,7 +23,14 @@ import { AsyncContext, AsyncRowStore } from 'oak-domain/lib/store/AsyncRowStore'
|
|||
import { SyncContext } from 'oak-domain/lib/store/SyncRowStore';
|
||||
import { CreateEntityOption } from '../types/Translator';
|
||||
import { QueryResult } from 'pg';
|
||||
import { DbStore } from '../types/dbStore';
|
||||
import { DbStore, Plan } from '../types/dbStore';
|
||||
|
||||
const ToNumberAttrs = new Set([
|
||||
'$$seq$$',
|
||||
'$$createAt$$',
|
||||
'$$updateAt$$',
|
||||
'$$deleteAt$$',
|
||||
]);
|
||||
|
||||
function convertGeoTextToObject(geoText: string): Geo {
|
||||
if (geoText.startsWith('POINT')) {
|
||||
|
|
@ -59,119 +69,119 @@ function convertGeoTextToObject(geoText: string): Geo {
|
|||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
throw new Error(`Unsupported geometry type: ${geoText.slice(0, 50)}`);
|
||||
}
|
||||
|
||||
export class PostgreSQLStore<
|
||||
ED extends EntityDict & BaseEntityDict,
|
||||
ED extends EntityDict & BaseEntityDict,
|
||||
Cxt extends AsyncContext<ED>
|
||||
> extends CascadeStore<ED> implements DbStore<ED, Cxt> {
|
||||
|
||||
|
||||
protected countAbjointRow<T extends keyof ED, OP extends SelectOption, Cxt extends SyncContext<ED>>(
|
||||
entity: T,
|
||||
selection: Pick<ED[T]['Selection'], 'filter' | 'count'>,
|
||||
context: Cxt,
|
||||
entity: T,
|
||||
selection: Pick<ED[T]['Selection'], 'filter' | 'count'>,
|
||||
context: Cxt,
|
||||
option: OP
|
||||
): number {
|
||||
throw new Error('PostgreSQL store 不支持同步取数据');
|
||||
}
|
||||
|
||||
|
||||
protected aggregateAbjointRowSync<T extends keyof ED, OP extends SelectOption, Cxt extends SyncContext<ED>>(
|
||||
entity: T,
|
||||
aggregation: ED[T]['Aggregation'],
|
||||
context: Cxt,
|
||||
entity: T,
|
||||
aggregation: ED[T]['Aggregation'],
|
||||
context: Cxt,
|
||||
option: OP
|
||||
): AggregationResult<ED[T]['Schema']> {
|
||||
throw new Error('PostgreSQL store 不支持同步取数据');
|
||||
}
|
||||
|
||||
|
||||
protected selectAbjointRow<T extends keyof ED, OP extends SelectOption>(
|
||||
entity: T,
|
||||
selection: ED[T]['Selection'],
|
||||
context: SyncContext<ED>,
|
||||
entity: T,
|
||||
selection: ED[T]['Selection'],
|
||||
context: SyncContext<ED>,
|
||||
option: OP
|
||||
): Partial<ED[T]['Schema']>[] {
|
||||
throw new Error('PostgreSQL store 不支持同步取数据');
|
||||
}
|
||||
|
||||
|
||||
protected updateAbjointRow<T extends keyof ED, OP extends OperateOption>(
|
||||
entity: T,
|
||||
operation: ED[T]['Operation'],
|
||||
context: SyncContext<ED>,
|
||||
entity: T,
|
||||
operation: ED[T]['Operation'],
|
||||
context: SyncContext<ED>,
|
||||
option: OP
|
||||
): number {
|
||||
throw new Error('PostgreSQL store 不支持同步更新数据');
|
||||
}
|
||||
|
||||
|
||||
async exec(script: string, txnId?: string) {
|
||||
await this.connector.exec(script, txnId);
|
||||
}
|
||||
|
||||
|
||||
connector: PostgreSQLConnector;
|
||||
translator: PostgreSQLTranslator<ED>;
|
||||
|
||||
|
||||
constructor(storageSchema: StorageSchema<ED>, configuration: PostgreSQLConfiguration) {
|
||||
super(storageSchema);
|
||||
this.connector = new PostgreSQLConnector(configuration);
|
||||
this.translator = new PostgreSQLTranslator(storageSchema);
|
||||
}
|
||||
|
||||
|
||||
checkRelationAsync<T extends keyof ED, Cxt extends AsyncContext<ED>>(
|
||||
entity: T,
|
||||
operation: Omit<ED[T]['Operation'] | ED[T]['Selection'], 'id'>,
|
||||
entity: T,
|
||||
operation: Omit<ED[T]['Operation'] | ED[T]['Selection'], 'id'>,
|
||||
context: Cxt
|
||||
): Promise<void> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
|
||||
|
||||
protected async aggregateAbjointRowAsync<T extends keyof ED, OP extends SelectOption, Cxt extends AsyncContext<ED>>(
|
||||
entity: T,
|
||||
aggregation: ED[T]['Aggregation'],
|
||||
context: Cxt,
|
||||
entity: T,
|
||||
aggregation: ED[T]['Aggregation'],
|
||||
context: Cxt,
|
||||
option: OP
|
||||
): Promise<AggregationResult<ED[T]['Schema']>> {
|
||||
const sql = this.translator.translateAggregate(entity, aggregation, option);
|
||||
const result = await this.connector.exec(sql, context.getCurrentTxnId());
|
||||
return this.formResult(entity, result[0]);
|
||||
}
|
||||
|
||||
|
||||
aggregate<T extends keyof ED, OP extends SelectOption>(
|
||||
entity: T,
|
||||
aggregation: ED[T]['Aggregation'],
|
||||
context: Cxt,
|
||||
entity: T,
|
||||
aggregation: ED[T]['Aggregation'],
|
||||
context: Cxt,
|
||||
option: OP
|
||||
): Promise<AggregationResult<ED[T]['Schema']>> {
|
||||
return this.aggregateAsync(entity, aggregation, context, option);
|
||||
}
|
||||
|
||||
|
||||
protected supportManyToOneJoin(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
protected supportMultipleCreate(): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
private formResult<T extends keyof ED>(entity: T, result: any): any {
|
||||
const schema = this.getSchema();
|
||||
|
||||
|
||||
function resolveAttribute<E extends keyof ED>(
|
||||
entity2: E,
|
||||
r: Record<string, any>,
|
||||
attr: string,
|
||||
entity2: E,
|
||||
r: Record<string, any>,
|
||||
attr: string,
|
||||
value: any
|
||||
) {
|
||||
const { attributes, view } = schema[entity2];
|
||||
|
||||
|
||||
if (!view) {
|
||||
const i = attr.indexOf(".");
|
||||
|
||||
|
||||
if (i !== -1) {
|
||||
const attrHead = attr.slice(0, i);
|
||||
const attrTail = attr.slice(i + 1);
|
||||
const rel = judgeRelation(schema, entity2, attrHead);
|
||||
|
||||
|
||||
if (rel === 1) {
|
||||
set(r, attr, value);
|
||||
} else {
|
||||
|
|
@ -190,14 +200,20 @@ export class PostgreSQLStore<
|
|||
}
|
||||
} else if (attributes[attr]) {
|
||||
const { type } = attributes[attr];
|
||||
|
||||
|
||||
switch (type) {
|
||||
case 'date':
|
||||
case 'time': {
|
||||
case 'time':
|
||||
case 'datetime': {
|
||||
if (value instanceof Date) {
|
||||
r[attr] = value.valueOf();
|
||||
} else {
|
||||
r[attr] = value;
|
||||
if (typeof value === 'string') {
|
||||
r[attr] = parseInt(value, 10);
|
||||
} else {
|
||||
assert(typeof value === 'number' || value === null);
|
||||
r[attr] = value;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
@ -266,6 +282,14 @@ export class PostgreSQLStore<
|
|||
} else if (attr.startsWith("#count")) {
|
||||
// PostgreSQL returns count as a string
|
||||
r[attr] = parseInt(value, 10);
|
||||
}
|
||||
else if (attr.startsWith("#sum") || attr.startsWith("#avg") || attr.startsWith("#min") || attr.startsWith("#max")) {
|
||||
// PostgreSQL returns sum/avg/min/max as strings
|
||||
r[attr] = parseFloat(value);
|
||||
}
|
||||
else if (ToNumberAttrs.has(attr)) {
|
||||
// System attributes ($$seq$$ / $$createAt$$ / $$updateAt$$ / $$deleteAt$$) are returned as bigint strings
|
||||
r[attr] = parseInt(value, 10);
|
||||
} else {
|
||||
r[attr] = value;
|
||||
}
|
||||
|
|
@ -281,7 +305,7 @@ export class PostgreSQLStore<
|
|||
function removeNullObjects<E extends keyof ED>(r: Record<string, any>, e: E) {
|
||||
for (let attr in r) {
|
||||
const rel = judgeRelation(schema, e, attr);
|
||||
|
||||
|
||||
if (rel === 2) {
|
||||
if (r[attr].id === null) {
|
||||
assert(schema[e].toModi || r.entity !== attr);
|
||||
|
|
@ -305,7 +329,7 @@ export class PostgreSQLStore<
|
|||
|
||||
function formSingleRow(r: any): any {
|
||||
let result2: Record<string, any> = {};
|
||||
|
||||
|
||||
for (let attr in r) {
|
||||
const value = r[attr];
|
||||
resolveAttribute(entity, result2, attr, value);
|
||||
|
|
@ -320,7 +344,7 @@ export class PostgreSQLStore<
|
|||
}
|
||||
return formSingleRow(result);
|
||||
}
|
||||
|
||||
|
||||
protected async selectAbjointRowAsync<T extends keyof ED>(
|
||||
entity: T,
|
||||
selection: ED[T]['Selection'],
|
||||
|
|
@ -331,7 +355,7 @@ export class PostgreSQLStore<
|
|||
const result = await this.connector.exec(sql, context.getCurrentTxnId());
|
||||
return this.formResult(entity, result[0]);
|
||||
}
|
||||
|
||||
|
||||
protected async updateAbjointRowAsync<T extends keyof ED>(
|
||||
entity: T,
|
||||
operation: ED[T]['Operation'],
|
||||
|
|
@ -363,81 +387,265 @@ export class PostgreSQLStore<
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async operate<T extends keyof ED>(
|
||||
entity: T,
|
||||
operation: ED[T]['Operation'],
|
||||
context: Cxt,
|
||||
entity: T,
|
||||
operation: ED[T]['Operation'],
|
||||
context: Cxt,
|
||||
option: OperateOption
|
||||
): Promise<OperationResult<ED>> {
|
||||
const { action } = operation;
|
||||
assert(!['select', 'download', 'stat'].includes(action), '不支持使用 select operation');
|
||||
return await super.operateAsync(entity, operation as any, context, option);
|
||||
}
|
||||
|
||||
|
||||
async select<T extends keyof ED>(
|
||||
entity: T,
|
||||
selection: ED[T]['Selection'],
|
||||
context: Cxt,
|
||||
entity: T,
|
||||
selection: ED[T]['Selection'],
|
||||
context: Cxt,
|
||||
option: SelectOption
|
||||
): Promise<Partial<ED[T]['Schema']>[]> {
|
||||
return await super.selectAsync(entity, selection, context, option);
|
||||
}
|
||||
|
||||
|
||||
protected async countAbjointRowAsync<T extends keyof ED>(
|
||||
entity: T,
|
||||
selection: Pick<ED[T]['Selection'], 'filter' | 'count'>,
|
||||
context: AsyncContext<ED>,
|
||||
entity: T,
|
||||
selection: Pick<ED[T]['Selection'], 'filter' | 'count'>,
|
||||
context: AsyncContext<ED>,
|
||||
option: SelectOption
|
||||
): Promise<number> {
|
||||
const sql = this.translator.translateCount(entity, selection, option);
|
||||
const result = await this.connector.exec(sql, context.getCurrentTxnId());
|
||||
|
||||
|
||||
// PostgreSQL returns count as a string (bigint)
|
||||
const cnt = result[0][0]?.cnt;
|
||||
return typeof cnt === 'string' ? parseInt(cnt, 10) : (cnt || 0);
|
||||
}
|
||||
|
||||
|
||||
async count<T extends keyof ED>(
|
||||
entity: T,
|
||||
selection: Pick<ED[T]['Selection'], 'filter' | 'count'>,
|
||||
context: Cxt,
|
||||
entity: T,
|
||||
selection: Pick<ED[T]['Selection'], 'filter' | 'count'>,
|
||||
context: Cxt,
|
||||
option: SelectOption
|
||||
) {
|
||||
return this.countAsync(entity, selection, context, option);
|
||||
}
|
||||
|
||||
|
||||
async begin(option?: TxnOption): Promise<string> {
|
||||
return await this.connector.startTransaction(option);
|
||||
}
|
||||
|
||||
|
||||
async commit(txnId: string): Promise<void> {
|
||||
await this.connector.commitTransaction(txnId);
|
||||
}
|
||||
|
||||
|
||||
async rollback(txnId: string): Promise<void> {
|
||||
await this.connector.rollbackTransaction(txnId);
|
||||
}
|
||||
|
||||
|
||||
async connect() {
|
||||
await this.connector.connect();
|
||||
}
|
||||
|
||||
|
||||
async disconnect() {
|
||||
await this.connector.disconnect();
|
||||
}
|
||||
|
||||
|
||||
async initialize(option: CreateEntityOption) {
|
||||
const schema = this.getSchema();
|
||||
|
||||
// Optional: create the PostGIS extension first
|
||||
// await this.connector.exec('CREATE EXTENSION IF NOT EXISTS postgis;');
|
||||
|
||||
for (const entity in schema) {
|
||||
const sqls = this.translator.translateCreateEntity(entity, option);
|
||||
for (const sql of sqls) {
|
||||
await this.connector.exec(sql);
|
||||
// PostgreSQL DDL is transactional, so wrap all initialization operations in a single transaction
|
||||
const txn = await this.connector.startTransaction({
|
||||
isolationLevel: 'serializable',
|
||||
});
|
||||
try {
|
||||
|
||||
const schema = this.getSchema();
|
||||
|
||||
let hasGeoType = false;
|
||||
let hasChineseTsConfig = false;
|
||||
|
||||
for (const entity in schema) {
|
||||
const { attributes, indexes } = schema[entity];
|
||||
for (const attr in attributes) {
|
||||
const { type } = attributes[attr];
|
||||
if (type === 'geometry') {
|
||||
hasGeoType = true;
|
||||
}
|
||||
}
|
||||
for (const index of indexes || []) {
|
||||
if (index.config?.tsConfig === 'chinese' || index.config?.tsConfig?.includes('chinese')) {
|
||||
hasChineseTsConfig = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (hasGeoType) {
|
||||
console.log('Initializing PostGIS extension for geometry support...');
|
||||
await this.connector.exec('CREATE EXTENSION IF NOT EXISTS postgis;');
|
||||
}
|
||||
|
||||
if (hasChineseTsConfig) {
|
||||
console.log('Initializing Chinese text search configuration...');
|
||||
const checkChineseConfigSql = `
|
||||
SELECT COUNT(*) as cnt
|
||||
FROM pg_catalog.pg_ts_config
|
||||
WHERE cfgname = 'chinese';
|
||||
`;
|
||||
const result = await this.connector.exec(checkChineseConfigSql);
|
||||
const count = parseInt(result[0][0]?.cnt || '0', 10);
|
||||
if (count === 0) {
|
||||
const createChineseConfigSql = `
|
||||
CREATE EXTENSION IF NOT EXISTS zhparser;
|
||||
CREATE TEXT SEARCH CONFIGURATION chinese (PARSER = zhparser);
|
||||
ALTER TEXT SEARCH CONFIGURATION chinese ADD MAPPING FOR n,v,a,i,e,l WITH simple;
|
||||
`;
|
||||
await this.connector.exec(createChineseConfigSql);
|
||||
}
|
||||
}
|
||||
|
||||
for (const entity in schema) {
|
||||
const sqls = this.translator.translateCreateEntity(entity, option);
|
||||
for (const sql of sqls) {
|
||||
await this.connector.exec(sql, txn);
|
||||
}
|
||||
}
|
||||
await this.connector.commitTransaction(txn);
|
||||
} catch (error) {
|
||||
await this.connector.rollbackTransaction(txn);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Read the current schema from the database
|
||||
readSchema() {
|
||||
return this.translator.readSchema((sql) => this.connector.exec(sql));
|
||||
}
|
||||
|
||||
/**
|
||||
* Decide how to upgrade based on the loaded dataSchema and the schema currently in the database.
|
||||
* The resulting plan has two phases, an additive phase and a reductive phase; between them the user corrects the data.
|
||||
*/
|
||||
async makeUpgradePlan() {
|
||||
const originSchema = await this.readSchema();
|
||||
const plan = this.diffSchema(originSchema, this.translator.schema);
|
||||
return plan;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare two schemas; what is computed here is the increment of new relative to old.
|
||||
* @param schemaOld
|
||||
* @param schemaNew
|
||||
*/
|
||||
diffSchema(schemaOld: StorageSchema<any>, schemaNew: StorageSchema<any>) {
|
||||
const plan: Plan = {
|
||||
newTables: {},
|
||||
newIndexes: {},
|
||||
updatedIndexes: {},
|
||||
updatedTables: {},
|
||||
};
|
||||
|
||||
for (const table in schemaNew) {
|
||||
// PostgreSQL table names are case-sensitive (when double-quoted)
|
||||
if (schemaOld[table]) {
|
||||
const { attributes, indexes } = schemaOld[table];
|
||||
const { attributes: attributesNew, indexes: indexesNew } = schemaNew[table];
|
||||
|
||||
const assignToUpdateTables = (attr: string, isNew: boolean) => {
|
||||
const skipAttrs = ['$$seq$$', '$$createAt$$', '$$updateAt$$', '$$deleteAt$$', 'id'];
|
||||
if (skipAttrs.includes(attr)) {
|
||||
return;
|
||||
}
|
||||
if (!plan.updatedTables[table]) {
|
||||
plan.updatedTables[table] = {
|
||||
attributes: {
|
||||
[attr]: {
|
||||
...attributesNew[attr],
|
||||
isNew,
|
||||
}
|
||||
}
|
||||
};
|
||||
} else {
|
||||
plan.updatedTables[table].attributes[attr] = {
|
||||
...attributesNew[attr],
|
||||
isNew,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
for (const attr in attributesNew) {
|
||||
if (attributes[attr]) {
|
||||
// Check whether the old and new attribute definitions are still equivalent
|
||||
const sql1 = this.translator.translateAttributeDef(attr, attributesNew[attr]);
|
||||
const sql2 = this.translator.translateAttributeDef(attr, attributes[attr]);
|
||||
if (!this.translator.compareSql(sql1, sql2)) {
|
||||
assignToUpdateTables(attr, false);
|
||||
}
|
||||
} else {
|
||||
assignToUpdateTables(attr, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (indexesNew) {
|
||||
const assignToIndexes = (index: Index<any>, isNew: boolean) => {
|
||||
if (isNew) {
|
||||
if (plan.newIndexes[table]) {
|
||||
plan.newIndexes[table].push(index);
|
||||
} else {
|
||||
plan.newIndexes[table] = [index];
|
||||
}
|
||||
} else {
|
||||
if (plan.updatedIndexes[table]) {
|
||||
plan.updatedIndexes[table].push(index);
|
||||
} else {
|
||||
plan.updatedIndexes[table] = [index];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const compareConfig = (config1?: IndexConfig, config2?: IndexConfig) => {
|
||||
const unique1 = config1?.unique || false;
|
||||
const unique2 = config2?.unique || false;
|
||||
if (unique1 !== unique2) {
|
||||
return false;
|
||||
}
|
||||
const type1 = config1?.type || 'btree';
|
||||
const type2 = config2?.type || 'btree';
|
||||
|
||||
// tsConfig 比较
|
||||
const tsConfig1 = config1?.tsConfig;
|
||||
const tsConfig2 = config2?.tsConfig;
|
||||
if (JSON.stringify(tsConfig1) !== JSON.stringify(tsConfig2)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return type1 === type2;
|
||||
};
|
||||
|
||||
for (const index of indexesNew) {
|
||||
const { name, config, attributes: indexAttrs } = index;
|
||||
const origin = indexes?.find(ele => ele.name === name);
|
||||
if (origin) {
|
||||
if (JSON.stringify(indexAttrs) !== JSON.stringify(origin.attributes)) {
|
||||
assignToIndexes(index, false);
|
||||
} else {
|
||||
if (!compareConfig(config, origin.config)) {
|
||||
assignToIndexes(index, false);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
assignToIndexes(index, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
plan.newTables[table] = {
|
||||
attributes: schemaNew[table].attributes,
|
||||
};
|
||||
if (schemaNew[table].indexes) {
|
||||
plan.newIndexes[table] = schemaNew[table].indexes!;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return plan;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import assert from 'assert';
|
||||
import { format } from 'util';
|
||||
import { assign, difference } from 'lodash';
|
||||
import { EntityDict, Geo, Q_FullTextValue, RefOrExpression, Ref, StorageSchema, Index, RefAttr, DeleteAtAttribute, TriggerDataAttribute, TriggerUuidAttribute, UpdateAtAttribute } from "oak-domain/lib/types";
|
||||
import { EntityDict, Geo, Q_FullTextValue, RefOrExpression, Ref, StorageSchema, Index, RefAttr, DeleteAtAttribute, TriggerDataAttribute, TriggerUuidAttribute, UpdateAtAttribute, Attribute, Attributes } from "oak-domain/lib/types";
|
||||
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
|
||||
import { DataType, DataTypeParams } from "oak-domain/lib/types/schema/DataTypes";
|
||||
import { SqlOperateOption, SqlSelectOption, SqlTranslator } from "../sqlTranslator";
|
||||
|
|
@ -210,6 +210,16 @@ export class PostgreSQLTranslator<ED extends EntityDict & BaseEntityDict> extend
|
|||
private makeUpSchema() {
|
||||
for (const entity in this.schema) {
|
||||
const { attributes, indexes } = this.schema[entity];
|
||||
// Automatically append $$deleteAt$$ to non-special indexes
|
||||
for (const index of indexes || []) {
|
||||
if (index.config?.type) {
|
||||
continue;
|
||||
}
|
||||
const indexAttrNames = index.attributes.map(attr => attr.name);
|
||||
if (!indexAttrNames.includes('$$deleteAt$$')) {
|
||||
index.attributes.push({ name: '$$deleteAt$$' });
|
||||
}
|
||||
}
|
||||
const geoIndexes: Index<ED[keyof ED]['OpSchema']>[] = [];
|
||||
for (const attr in attributes) {
|
||||
if (attributes[attr].type === 'geometry') {
|
||||
|
|
@ -796,6 +806,7 @@ export class PostgreSQLTranslator<ED extends EntityDict & BaseEntityDict> extend
|
|||
else if (typeof value === 'number') {
|
||||
return `${value}`;
|
||||
}
|
||||
assert(typeof value === 'string', 'Invalid date/time value');
|
||||
return `'${(new Date(value)).valueOf()}'`;
|
||||
}
|
||||
case 'object':
|
||||
|
|
@ -1066,15 +1077,10 @@ export class PostgreSQLTranslator<ED extends EntityDict & BaseEntityDict> extend
|
|||
indexSql += '(';
|
||||
|
||||
const indexColumns: string[] = [];
|
||||
let includeDeleteAt = false;
|
||||
|
||||
for (const indexAttr of indexAttrs) {
|
||||
const { name: attrName, direction } = indexAttr;
|
||||
|
||||
if (attrName === '$$deleteAt$$') {
|
||||
includeDeleteAt = true;
|
||||
}
|
||||
|
||||
if (indexType === 'fulltext') {
|
||||
// 全文索引:使用 to_tsvector
|
||||
indexColumns.push(`to_tsvector('${tsLang}', COALESCE("${attrName as string}", ''))`);
|
||||
|
|
@ -1088,11 +1094,6 @@ export class PostgreSQLTranslator<ED extends EntityDict & BaseEntityDict> extend
|
|||
}
|
||||
}
|
||||
|
||||
// Non-special indexes automatically include deleteAt
|
||||
if (!includeDeleteAt && !indexType) {
|
||||
indexColumns.push('"$$deleteAt$$"');
|
||||
}
|
||||
|
||||
indexSql += indexColumns.join(', ');
|
||||
indexSql += ');';
|
||||
|
||||
|
|
@ -2038,4 +2039,431 @@ export class PostgreSQLTranslator<ED extends EntityDict & BaseEntityDict> extend
|
|||
// This method should no longer be called directly, because translateRemove has been overridden
|
||||
throw new Error('populateRemoveStmt should not be called directly in PostgreSQL. Use translateRemove instead.');
|
||||
}
|
||||
|
||||
/**
|
||||
* Translate a type returned by PostgreSQL back into an oak type; the inverse of populateDataTypeDef
|
||||
* @param type PostgreSQL type string
|
||||
*/
|
||||
private reTranslateToAttribute(type: string): Attribute {
|
||||
// Handle types with a length: character varying(255), character(10)
|
||||
const varcharMatch = /^character varying\((\d+)\)$/.exec(type);
|
||||
if (varcharMatch) {
|
||||
return {
|
||||
type: 'varchar',
|
||||
params: {
|
||||
length: parseInt(varcharMatch[1], 10),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const charMatch = /^character\((\d+)\)$/.exec(type);
|
||||
if (charMatch) {
|
||||
return {
|
||||
type: 'char',
|
||||
params: {
|
||||
length: parseInt(charMatch[1], 10),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Handle types with precision and scale: numeric(10,2)
|
||||
const numericWithScaleMatch = /^numeric\((\d+),(\d+)\)$/.exec(type);
|
||||
if (numericWithScaleMatch) {
|
||||
return {
|
||||
type: 'decimal',
|
||||
params: {
|
||||
precision: parseInt(numericWithScaleMatch[1], 10),
|
||||
scale: parseInt(numericWithScaleMatch[2], 10),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// 处理只带精度的类型:numeric(10), timestamp(6)
|
||||
const numericMatch = /^numeric\((\d+)\)$/.exec(type);
|
||||
if (numericMatch) {
|
||||
return {
|
||||
type: 'decimal',
|
||||
params: {
|
||||
precision: parseInt(numericMatch[1], 10),
|
||||
scale: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const timestampMatch = /^timestamp\((\d+)\) without time zone$/.exec(type);
|
||||
if (timestampMatch) {
|
||||
return {
|
||||
type: 'timestamp',
|
||||
params: {
|
||||
precision: parseInt(timestampMatch[1], 10),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const timeMatch = /^time\((\d+)\) without time zone$/.exec(type);
|
||||
if (timeMatch) {
|
||||
return {
|
||||
type: 'time',
|
||||
params: {
|
||||
precision: parseInt(timeMatch[1], 10),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// PostgreSQL 类型映射到 oak 类型
|
||||
const typeMap: Record<string, DataType> = {
|
||||
'bigint': 'bigint',
|
||||
'integer': 'integer',
|
||||
'smallint': 'smallint',
|
||||
'real': 'real',
|
||||
'double precision': 'double precision',
|
||||
'boolean': 'boolean',
|
||||
'text': 'text',
|
||||
'jsonb': 'object', // 框架使用 jsonb 存储 object/array
|
||||
'json': 'object',
|
||||
'bytea': 'bytea',
|
||||
'character varying': 'varchar',
|
||||
'character': 'char',
|
||||
'timestamp without time zone': 'timestamp',
|
||||
'time without time zone': 'time',
|
||||
'date': 'date',
|
||||
'uuid': 'uuid',
|
||||
'geometry': 'geometry',
|
||||
};
|
||||
|
||||
const mappedType = typeMap[type];
|
||||
if (mappedType) {
|
||||
return { type: mappedType };
|
||||
}
|
||||
|
||||
// 如果是用户定义的枚举类型,返回 enum(具体值需要额外查询)
|
||||
// 这里先返回基础类型,枚举值在 readSchema 中单独处理
|
||||
return { type: type as DataType };
|
||||
}
|
||||
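// A few round trips this inverse mapping should produce, per the branches above:
//   reTranslateToAttribute('character varying(255)') -> { type: 'varchar', params: { length: 255 } }
//   reTranslateToAttribute('numeric(10,2)')          -> { type: 'decimal', params: { precision: 10, scale: 2 } }
//   reTranslateToAttribute('jsonb')                  -> { type: 'object' }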

    /**
     * Read the current schema structure from the PostgreSQL database
     */
    async readSchema(execFn: (sql: string) => Promise<any>): Promise<StorageSchema<ED>> {
        const result: StorageSchema<ED> = {} as StorageSchema<ED>;

        // 1. Fetch all tables
        const tablesSql = `
            SELECT tablename
            FROM pg_tables
            WHERE schemaname = 'public'
            ORDER BY tablename;
        `;
        const [tablesResult] = await execFn(tablesSql);

        for (const tableRow of tablesResult) {
            const tableName = tableRow.tablename;

            // 2. Fetch the table's column information
            const columnsSql = `
                SELECT
                    column_name,
                    data_type,
                    character_maximum_length,
                    numeric_precision,
                    numeric_scale,
                    is_nullable,
                    column_default,
                    udt_name
                FROM information_schema.columns
                WHERE table_schema = 'public'
                AND table_name = '${tableName}'
                ORDER BY ordinal_position;
            `;
            const [columnsResult] = await execFn(columnsSql);

            const attributes: Attributes<any> = {};

            for (const col of columnsResult) {
                const {
                    column_name: colName,
                    data_type: dataType,
                    character_maximum_length: maxLength,
                    numeric_precision: precision,
                    numeric_scale: scale,
                    is_nullable: isNullable,
                    column_default: defaultValue,
                    udt_name: udtName,
                } = col;

                let attr: Attribute;

                // Handle user-defined types (enums)
                if (dataType === 'USER-DEFINED') {
                    // Query the enum values
                    const enumSql = `
                        SELECT e.enumlabel
                        FROM pg_type t
                        JOIN pg_enum e ON t.oid = e.enumtypid
                        WHERE t.typname = '${udtName}'
                        ORDER BY e.enumsortorder;
                    `;
                    const [enumResult] = await execFn(enumSql);
                    const enumeration = enumResult.map((r: any) => r.enumlabel);

                    attr = {
                        type: 'enum',
                        enumeration,
                    };
                } else {
                    // Build the full type string
                    let fullType = dataType;
                    if (maxLength) {
                        fullType = `${dataType}(${maxLength})`;
                    } else if (precision !== null && scale !== null) {
                        fullType = `${dataType}(${precision},${scale})`;
                    } else if (precision !== null) {
                        fullType = `${dataType}(${precision})`;
                    }

                    attr = this.reTranslateToAttribute(fullType);
                }

                // Constraints
                attr.notNull = isNullable === 'NO';

                // Default values (simplified; complex defaults may need finer-grained parsing)
                if (defaultValue && !defaultValue.includes('nextval')) {
                    // Sequence defaults are skipped; otherwise strip type casts and quotes
                    let cleanDefault = defaultValue.replace(/::[a-z]+/gi, '').replace(/'/g, '');
                    if (cleanDefault === 'true') {
                        attr.default = true;
                    } else if (cleanDefault === 'false') {
                        attr.default = false;
                    } else if (!isNaN(Number(cleanDefault))) {
                        attr.default = Number(cleanDefault);
                    } else {
                        attr.default = cleanDefault;
                    }
                }

                // Check whether this is a sequence (IDENTITY) column
                if (colName === '$$seq$$' || (defaultValue && defaultValue.includes('nextval'))) {
                    attr.sequenceStart = 10000; // default start value
                }

                attributes[colName] = attr;
            }

            // 3. Fetch index information
            const indexesSql = `
                SELECT
                    i.relname as index_name,
                    ix.indisunique as is_unique,
                    am.amname as index_type,
                    pg_get_indexdef(ix.indexrelid) as index_def
                FROM pg_class t
                JOIN pg_index ix ON t.oid = ix.indrelid
                JOIN pg_class i ON i.oid = ix.indexrelid
                JOIN pg_am am ON i.relam = am.oid
                WHERE t.relname = '${tableName}'
                AND t.relkind = 'r'
                AND i.relname NOT LIKE '%_pkey'
                AND NOT ix.indisprimary
                ORDER BY i.relname;
            `;
            const [indexesResult] = await execFn(indexesSql) as [{
                index_name: string;
                is_unique: boolean;
                index_type: string;
                index_def: string;
            }[]];

            if (indexesResult.length > 0) {
                const indexes: Index<any>[] = [];

                for (const row of indexesResult) {
                    const { index_name: indexName, is_unique: isUnique, index_type: indexType, index_def: indexDef } = row;

                    // Parse the index definition to recover column names and config
                    const index: Index<any> = {
                        name: indexName,
                        attributes: [],
                    };

                    // Example index definition string:
                    // CREATE INDEX "user_index_fulltext_chinese" ON public."user" USING gin (to_tsvector('chinese'::regconfig, (COALESCE(name, ''::text) || ' '::text) || COALESCE(nickname, ''::text)))

                    if (indexType === 'gin' && indexDef.includes('to_tsvector')) {
                        // Full-text index
                        index.config = { type: 'fulltext' };

                        // Extract tsConfig
                        const tsConfigMatch = indexDef.match(/to_tsvector\('([^']+)'/);
                        if (tsConfigMatch) {
                            const tsConfig = tsConfigMatch[1];
                            index.config.tsConfig = tsConfig;
                        }

                        // Extract column names (from the COALESCE calls)
                        const columnMatches = indexDef.matchAll(/COALESCE\("?([^",\s]+)"?/g);
                        const columns = Array.from(columnMatches, m => m[1]);
                        index.attributes = columns.map(col => ({ name: col }));

                        // Multi-language indexes: strip the language suffix
                        // e.g. user_index_fulltext_chinese -> user_index_fulltext
                        const nameParts = indexName.split('_');
                        if (nameParts.length > 2) {
                            const possibleLang = nameParts[nameParts.length - 1];
                            // If the last segment is a language code, drop it
                            if (['chinese', 'english', 'simple', 'german', 'french', 'spanish', 'russian', 'japanese'].includes(possibleLang)) {
                                index.name = nameParts.slice(0, -1).join('_');
                            }
                        }
                    } else if (indexType === 'gist') {
                        // Spatial index
                        index.config = { type: 'spatial' };

                        // Extract column names
                        const columnMatch = indexDef.match(/\(([^)]+)\)/);
                        if (columnMatch) {
                            const columns = columnMatch[1].split(',').map(c => c.trim().replace(/"/g, ''));
                            index.attributes = columns.map(col => ({ name: col }));
                        }
                    } else if (indexType === 'hash') {
                        // Hash index
                        index.config = { type: 'hash' };

                        // Extract column names
                        const columnMatch = indexDef.match(/\(([^)]+)\)/);
                        if (columnMatch) {
                            const columns = columnMatch[1].split(',').map(c => c.trim().replace(/"/g, ''));
                            index.attributes = columns.map(col => ({ name: col }));
                        }
                    } else {
                        // B-tree index (default): extract column names and sort direction
                        const columnMatch = indexDef.match(/\(([^)]+)\)/);
                        if (columnMatch) {
                            const columnDefs = columnMatch[1].split(',');
                            index.attributes = columnDefs.map(colDef => {
                                const trimmed = colDef.trim().replace(/"/g, '');
                                const parts = trimmed.split(/\s+/);
                                const attr: any = { name: parts[0] };

                                // Sort direction
                                if (parts.includes('DESC')) {
                                    attr.direction = 'DESC';
                                } else if (parts.includes('ASC')) {
                                    attr.direction = 'ASC';
                                }

                                return attr;
                            });
                        }

                        // Unique index
                        if (isUnique) {
                            index.config = { unique: true };
                        }
                    }

                    // Strip the table-name prefix (if present)
                    // e.g. user_index_fulltext -> index_fulltext
                    if (index.name.startsWith(`${tableName}_`)) {
                        index.name = index.name.substring(tableName.length + 1);
                    }

                    indexes.push(index);
                }

                Object.assign(result, {
                    [tableName]: {
                        attributes,
                        indexes,
                    }
                });
            } else {
                Object.assign(result, {
                    [tableName]: {
                        attributes,
                    }
                });
            }
        }

        return result;
    }
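    // A hedged sketch of driving readSchema: the only contract it relies on is an execFn whose
    // resolved value destructures into [rows]. The runQuery helper below is an assumed placeholder,
    // not an oak-db API.
    async function dumpSchema(translator: PostgreSQLTranslator<any>, runQuery: (sql: string) => Promise<any[]>) {
        const execFn = async (sql: string) => [await runQuery(sql)];   // wrap rows in the [rows] tuple readSchema expects
        const schema = await translator.readSchema(execFn);
        console.log(JSON.stringify(schema, null, 2));
    }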

    /**
     * Convert an attribute definition into a PostgreSQL DDL clause
     * @param attr the attribute name
     * @param attrDef the attribute definition
     */
    translateAttributeDef(attr: string, attrDef: Attribute): string {
        let sql = `"${attr}" `;
        const {
            type,
            params,
            default: defaultValue,
            unique,
            notNull,
            sequenceStart,
            enumeration,
        } = attrDef;

        // Sequence (IDENTITY) types
        if (typeof sequenceStart === 'number') {
            sql += `bigint GENERATED BY DEFAULT AS IDENTITY (START WITH ${sequenceStart}) UNIQUE`;
            return sql;
        }

        // Enum types
        if (type === 'enum') {
            assert(enumeration, 'Enum type requires enumeration values');
            // Note: this is a placeholder; the actual enum type name is determined in translateCreateEntity.
            // The string form of the enumeration values is used here so comparisons stay consistent.
            sql += `enum(${enumeration.map(v => `'${v}'`).join(',')})`;
        } else {
            sql += this.populateDataTypeDef(type, params, enumeration);
        }

        // NOT NULL constraint
        if (notNull || type === 'geometry') {
            sql += ' NOT NULL';
        }

        // UNIQUE constraint
        if (unique) {
            sql += ' UNIQUE';
        }

        // Default value
        if (defaultValue !== undefined && !sequenceStart) {
            assert(type !== 'ref', 'ref type should not have default value');
            sql += ` DEFAULT ${this.translateAttrValue(type, defaultValue)}`;
        }

        // Primary key
        if (attr === 'id') {
            sql += ' PRIMARY KEY';
        }

        return sql;
    }
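    // Example outputs, assuming illustrative attribute definitions (the varchar rendering depends on
    // populateDataTypeDef, which is not shown in this hunk):
    //   translateAttributeDef('seq', { type: 'bigint', sequenceStart: 10000 })
    //     -> '"seq" bigint GENERATED BY DEFAULT AS IDENTITY (START WITH 10000) UNIQUE'
    //   translateAttributeDef('name', { type: 'varchar', params: { length: 64 }, notNull: true })
    //     -> '"name" varchar(64) NOT NULL'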

    /**
     * Compare two SQL statements for equivalence (used for schema diff),
     * ignoring formatting differences such as whitespace and case
     */
    compareSql(sql1: string, sql2: string): boolean {
        // Normalize the SQL: collapse extra whitespace and unify case
        const normalize = (sql: string): string => {
            return sql
                .replace(/\s+/g, ' ')   // collapse runs of whitespace into a single space
                .replace(/\(\s+/g, '(') // drop spaces after an opening parenthesis
                .replace(/\s+\)/g, ')') // drop spaces before a closing parenthesis
                .replace(/,\s+/g, ',')  // drop spaces after commas
                .trim()
                .toLowerCase();
        };

        return normalize(sql1) === normalize(sql2);
    }
}
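// For example, compareSql should treat these two renderings as equivalent (illustrative strings):
//   compareSql('"name" VARCHAR( 64 )  NOT NULL', '"name" varchar(64) not null') === true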
@ -1,9 +1,60 @@
import { EntityDict } from "oak-domain/lib/base-app-domain";
import {
    Attribute,
    EntityDict,
    Index,
    OperateOption,
    OperationResult,
    StorageSchema,
    TxnOption,
} from 'oak-domain/lib/types';
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
import { AsyncContext, AsyncRowStore } from "oak-domain/lib/store/AsyncRowStore";
import { CreateEntityOption } from "./Translator";
import { AggregationResult, SelectOption } from "oak-domain/lib/types";

export interface DbStore<ED extends EntityDict, Cxt extends AsyncContext<ED>> extends AsyncRowStore<ED, Cxt> {
export type Plan = {
    newTables: Record<string, {
        attributes: Record<string, Attribute>;
    }>;
    newIndexes: Record<string, Index<any>[]>;
    updatedTables: Record<string, {
        attributes: Record<string, Attribute & { isNew: boolean }>;
    }>;
    updatedIndexes: Record<string, Index<any>[]>;
};
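
// A sketch of a Plan value as diffSchema might fill it in; the entity and attribute names below are
// illustrative assumptions only.
const examplePlan: Plan = {
    newTables: {
        book: { attributes: { title: { type: 'varchar', params: { length: 128 } } } },
    },
    newIndexes: {
        book: [{ name: 'index_title', attributes: [{ name: 'title' }] }],
    },
    updatedTables: {
        user: { attributes: { nickname: { type: 'varchar', params: { length: 64 }, isNew: true } } },
    },
    updatedIndexes: {},
};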

export interface DbStore<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>> extends AsyncRowStore<ED, Cxt> {
    connect: () => Promise<void>;
    disconnect: () => Promise<void>;
    initialize(options: CreateEntityOption): Promise<void>;
    aggregate<T extends keyof ED, OP extends SelectOption>(
        entity: T,
        aggregation: ED[T]['Aggregation'],
        context: Cxt,
        option: OP
    ): Promise<AggregationResult<ED[T]['Schema']>>;
    operate<T extends keyof ED>(
        entity: T,
        operation: ED[T]['Operation'],
        context: Cxt,
        option: OperateOption
    ): Promise<OperationResult<ED>>;
    select<T extends keyof ED>(
        entity: T,
        selection: ED[T]['Selection'],
        context: Cxt,
        option: SelectOption
    ): Promise<Partial<ED[T]['Schema']>[]>;
    count<T extends keyof ED>(
        entity: T,
        selection: Pick<ED[T]['Selection'], 'filter' | 'count'>,
        context: Cxt,
        option: SelectOption
    ): Promise<number>;
    begin(option?: TxnOption): Promise<string>;
    commit(txnId: string): Promise<void>;
    rollback(txnId: string): Promise<void>;
    readSchema(): Promise<StorageSchema<ED>>;
    makeUpgradePlan(): Promise<Plan>;
    diffSchema(schemaOld: StorageSchema<any>, schemaNew: StorageSchema<any>): Plan;
};
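
// A hedged sketch of how the schema-upgrade members of DbStore might be combined by calling code;
// the control flow here is an assumption, not something this diff defines.
async function planUpgrade<ED extends EntityDict & BaseEntityDict, Cxt extends AsyncContext<ED>>(
    store: DbStore<ED, Cxt>,
    desiredSchema: StorageSchema<ED>
): Promise<Plan> {
    const current = await store.readSchema();           // schema as it currently exists in the database
    return store.diffSchema(current, desiredSchema);    // tables and indexes to create or update
}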

@ -1835,4 +1835,90 @@ export default (storeGetter: () => DbStore<EntityDict, TestContext>) => {
        assert(r1.length === 1, `Deeply nested query failed`);
        assert(r1[0].system?.platform?.name === 'test_platform', `Nested projection failed`);
    });

    it('[1.4.1] nested cross-node expression', async () => {
        const store = storeGetter();
        const context = new TestContext(store);
        await context.begin();

        const platformId = v4();
        await store.operate('platform', {
            id: v4(),
            action: 'create',
            data: {
                id: platformId,
                name: 'test2'
            }
        }, context, {});
        await store.operate('application', {
            id: v4(),
            action: 'create',
            data: [{
                id: v4(),
                name: 'test',
                description: 'ttttt',
                type: 'web',
                system: {
                    id: v4(),
                    action: 'create',
                    data: {
                        id: v4(),
                        name: 'systest',
                        folder: 'systest',
                        platformId,
                    }
                },
            }, {
                id: v4(),
                name: 'test222',
                description: 'ttttt2',
                type: 'web',
                system: {
                    id: v4(),
                    action: 'create',
                    data: {
                        id: v4(),
                        name: 'test2222',
                        folder: 'test2',
                        platformId,
                    }
                },
            }]
        }, context, {});

        process.env.NODE_ENV = 'development';

        // Select every application whose application->system->platform.name equals application->system.folder
        const apps = await store.select('application', {
            data: {
                id: 1,
                name: 1,
                system: {
                    id: 1,
                    name: 1,
                    platform: {
                        id: 1,
                        name: 1,
                    }
                }
            },
            filter: {
                system: {
                    "#id": 'node-1',
                    platform: {
                        $expr: {
                            $eq: [
                                { "#attr": 'name' },
                                { "#refId": 'node-1', "#refAttr": 'folder' }
                            ]
                        }
                    }
                }
            }
        }, context, {});

        process.env.NODE_ENV = undefined;
        assert(apps.length === 1 && apps[0].name === 'test222');
        await context.commit();
    });
}