Synchronizer: a more complete implementation

Xu Chang 2024-03-01 16:22:04 +08:00
parent a806e0638a
commit b12f04c523
7 changed files with 183 additions and 263 deletions
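Reviewer note: the externally visible change in this commit is the shape of the sync configuration. lib/configuration/sync.(ts|js) now default-exports an array of SyncConfig entries (AppLoader builds one Synchronizer per entry), and the callbacks receive the backend context as their first argument instead of being wrapped by AppLoader. Below is a minimal sketch of such a configuration file, assuming only the fields this diff actually touches (self.entity, self.endpoint, getSelfEncryptInfo, getPushInfo, getPullInfo, pushEntities, pullEntities); the concrete values, the EntityDict import path and the returned field sets are illustrative, not part of this commit.

    // Sketch only; see SyncConfig in oak-domain/lib/types for the authoritative shape.
    import { SyncConfig } from 'oak-domain/lib/types';
    import { BackendRuntimeContext } from 'oak-frontend-base/lib/context/BackendRuntimeContext';
    import { EntityDict } from '../oak-app-domain';            // hypothetical: the app's own entity dict

    type Cxt = BackendRuntimeContext<EntityDict>;

    const syncConfigs: SyncConfig<EntityDict, Cxt>[] = [
        {
            self: {
                entity: 'company',                             // the entity representing this node
                endpoint: 'sync',                              // mounted with params ['entity', 'entityId']
                getSelfEncryptInfo: async (context: Cxt) => ({
                    id: 'A',                                   // appended to the push URL, checked by the peer
                    algorithm: 'rsa',
                    publicKey: '<public-key>',
                }),
            },
            remotes: [
                {
                    entity: 'company',                         // the entity representing remote nodes
                    getPushInfo: async (context: Cxt, params: { userId: string; remoteEntityId: string }) => ({
                        url: 'https://peer.example.com',       // only `url` is consumed in this diff
                    }),
                    getPullInfo: async (context: Cxt, params: { selfId: string; remoteEntityId: string }) => ({
                        userId: 'user-of-peer',                // becomes the current user for incoming opers
                        algorithm: 'rsa',
                        publicKey: '<public-key>',
                    }),
                    pushEntities: [],                          // PushEntityDef list (entity, path, actions, ...)
                    pullEntities: [],                          // PullEntityDef list
                },
            ],
        } as any,                                              // cast: the full SyncConfig field set is not reproduced here
    ];

    export default syncConfigs;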

lib/AppLoader.d.ts vendored

@ -12,7 +12,7 @@ export declare class AppLoader<ED extends EntityDict & BaseEntityDict, Cxt exten
private aspectDict;
private externalDependencies;
protected dataSubscriber?: DataSubscriber<ED, Cxt>;
protected synchronizer?: Synchronizer<ED, Cxt>;
protected synchronizers?: Synchronizer<ED, Cxt>[];
protected contextBuilder: (scene?: string) => (store: DbStore<ED, Cxt>) => Promise<Cxt>;
private requireSth;
protected makeContext(cxtStr?: string, headers?: IncomingHttpHeaders): Promise<Cxt>;


@ -21,7 +21,7 @@ class AppLoader extends types_1.AppLoader {
aspectDict;
externalDependencies;
dataSubscriber;
synchronizer;
synchronizers;
contextBuilder;
requireSth(filePath) {
const depFilePath = (0, path_1.join)(this.path, filePath);
@ -101,10 +101,10 @@ class AppLoader extends types_1.AppLoader {
const dbConfigFile = (0, path_1.join)(this.path, 'configuration', 'mysql.json');
const dbConfig = require(dbConfigFile);
const syncConfigFile = (0, path_1.join)(this.path, 'lib', 'configuration', 'sync.js');
const syncConfig = (0, fs_1.existsSync)(syncConfigFile) && require(syncConfigFile).default;
const syncConfigs = (0, fs_1.existsSync)(syncConfigFile) && require(syncConfigFile).default;
return {
dbConfig: dbConfig,
syncConfig: syncConfig,
syncConfigs: syncConfigs,
};
}
constructor(path, contextBuilder, ns, nsServer) {
@ -149,70 +149,20 @@ class AppLoader extends types_1.AppLoader {
adTriggers.forEach((trigger) => this.registerTrigger(trigger));
checkers.forEach((checker) => this.dbStore.registerChecker(checker));
adCheckers.forEach((checker) => this.dbStore.registerChecker(checker));
if (this.synchronizer) {
if (this.synchronizers) {
// Syncing data to remote nodes is done through commit triggers
const syncTriggers = this.synchronizer.getSyncTriggers();
syncTriggers.forEach((trigger) => this.registerTrigger(trigger));
for (const synchronizer of this.synchronizers) {
const syncTriggers = synchronizer.getSyncTriggers();
syncTriggers.forEach((trigger) => this.registerTrigger(trigger));
}
}
}
async mount(initialize) {
const { path } = this;
if (!initialize) {
const { dbConfig, syncConfig } = this.getConfiguration();
if (syncConfig) {
const { self, remotes } = syncConfig;
const { getSelfEncryptInfo, ...restSelf } = self;
this.synchronizer = new Synchronizer_1.default({
self: {
// entity: self.entity,
getSelfEncryptInfo: async () => {
const context = await this.contextBuilder()(this.dbStore);
await context.begin();
try {
const result = await self.getSelfEncryptInfo(context);
await context.commit();
return result;
}
catch (err) {
await context.rollback();
throw err;
}
},
...restSelf
},
remotes: remotes.map((r) => {
const { getPushInfo, getPullInfo, ...rest } = r;
return {
getRemotePushInfo: async (id) => {
const context = await this.contextBuilder()(this.dbStore);
await context.begin();
try {
const result = await getPushInfo(id, context);
await context.commit();
return result;
}
catch (err) {
await context.rollback();
throw err;
}
},
getRemotePullInfo: async (userId) => {
const context = await this.contextBuilder()(this.dbStore);
await context.begin();
try {
const result = await getPullInfo(userId, context);
await context.commit();
return result;
}
catch (err) {
await context.rollback();
throw err;
}
},
...rest,
};
})
}, this.dbStore.getSchema());
const { syncConfigs } = this.getConfiguration();
if (syncConfigs) {
this.synchronizers = syncConfigs.map(config => new Synchronizer_1.default(config, this.dbStore.getSchema()));
}
this.initTriggers();
}
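Reviewer note on the hunk above: this is where AppLoader used to wrap getSelfEncryptInfo / getPushInfo / getPullInfo in a fresh context with begin/commit/rollback; the new code hands each SyncConfig to Synchronizer untouched, and nothing in the shown hunks reintroduces that wrapping, so the callbacks now run on whatever context the trigger or endpoint is already using. If a callback still wants its own transaction, it would have to manage one itself, roughly as in this sketch (loadSelfEncryptInfo is a hypothetical helper, not part of this commit):

    // Hedged sketch: a sync callback managing its own transaction, mirroring the
    // begin/commit/rollback wrapper that the deleted mount() code used to add.
    declare function loadSelfEncryptInfo(context: unknown): Promise<{ id: string }>;  // hypothetical

    async function getSelfEncryptInfoInOwnTxn(context: {
        begin(): Promise<void>;
        commit(): Promise<void>;
        rollback(): Promise<void>;
    }) {
        await context.begin();
        try {
            const info = await loadSelfEncryptInfo(context);
            await context.commit();
            return info;
        }
        catch (err) {
            await context.rollback();
            throw err;
        }
    }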
@ -325,9 +275,11 @@ class AppLoader extends types_1.AppLoader {
transformEndpointItem(router, item);
}
}
if (this.synchronizer) {
const syncEp = this.synchronizer.getSelfEndpoint();
transformEndpointItem(syncEp.name, syncEp);
if (this.synchronizers) {
this.synchronizers.forEach((synchronizer) => {
const syncEp = synchronizer.getSelfEndpoint();
transformEndpointItem(syncEp.name, syncEp);
});
}
return endPointRouters;
}


@ -1,8 +1,7 @@
import { EntityDict, StorageSchema, EndpointItem } from 'oak-domain/lib/types';
import { EntityDict, StorageSchema, EndpointItem, SyncConfig } from 'oak-domain/lib/types';
import { VolatileTrigger } from 'oak-domain/lib/types/Trigger';
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
import { BackendRuntimeContext } from 'oak-frontend-base/lib/context/BackendRuntimeContext';
import { SyncConfigWrapper } from './types/Sync';
export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt extends BackendRuntimeContext<ED>> {
private config;
private schema;
@ -27,12 +26,11 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
private pushOper;
private getSelfEncryptInfo;
private makeCreateOperTrigger;
constructor(config: SyncConfigWrapper<ED, Cxt>, schema: StorageSchema<ED>);
constructor(config: SyncConfig<ED, Cxt>, schema: StorageSchema<ED>);
/**
* The commit triggers defined for sync
* @returns
*/
getSyncTriggers(): VolatileTrigger<ED, keyof ED, Cxt>[];
private checkOperationConsistent;
getSelfEndpoint(): EndpointItem<ED, Cxt>;
}
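Usage, as wired by the AppLoader changes elsewhere in this commit: one Synchronizer per SyncConfig, its commit triggers registered on the store, its self endpoint mounted. The sketch below only restates that wiring; dbStore, registerTrigger and transformEndpointItem stand for the corresponding AppLoader members and are not redeclared here.

    // Usage sketch mirroring AppLoader.initTriggers() / getEndpoints() in this commit.
    const synchronizers = syncConfigs.map(
        (config) => new Synchronizer(config, dbStore.getSchema())
    );
    for (const synchronizer of synchronizers) {
        // volatile commit triggers that push freshly created opers to remote nodes
        synchronizer.getSyncTriggers().forEach((trigger) => registerTrigger(trigger));
        // the HTTP endpoint that receives opers pushed by remote nodes
        const ep = synchronizer.getSelfEndpoint();
        transformEndpointItem(ep.name, ep);
    }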


@ -6,7 +6,8 @@ const assert_1 = tslib_1.__importDefault(require("assert"));
const path_1 = require("path");
const lodash_1 = require("oak-domain/lib/utils/lodash");
const filter_1 = require("oak-domain/lib/store/filter");
const OAK_SYNC_HEADER_ITEM = 'oak-sync-remote-id';
const OAK_SYNC_HEADER_ENTITY = 'oak-sync-entity';
const OAK_SYNC_HEADER_ENTITYID = 'oak-sync-entity-id';
class Synchronizer {
config;
schema;
@ -19,7 +20,7 @@ class Synchronizer {
* @param channel
* @param retry
*/
async pushOnChannel(channel, retry) {
async pushOnChannel(remoteEntity, remoteEntityId, context, channel, retry) {
const { queue, api, nextPushTimestamp } = channel;
(0, assert_1.default)(nextPushTimestamp);
// The retry interval should be stretched as failures accumulate, up to a maximum of 1024 seconds
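The comment above only states the policy (stretch the retry interval as failures accumulate, cap it at 1024 seconds); the computation of nextPushTimestamp itself lies outside the hunks shown, so the following is merely an assumed shape for such a backoff, not code from this commit:

    // Assumed exponential backoff with a 1024-second cap (illustrative only).
    function retryDelayMs(retry: number): number {
        return Math.min(2 ** Math.max(retry, 1), 1024) * 1000;
    }
    // retry 1 → 2 s, retry 5 → 32 s, retry 10 and beyond → 1024 s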
@ -31,18 +32,20 @@ class Synchronizer {
let json;
try {
// todo: encryption
const selfEncryptInfo = await this.getSelfEncryptInfo();
const selfEncryptInfo = await this.getSelfEncryptInfo(context);
console.log('向远端结点sync数据', api, JSON.stringify(opers));
const res = await fetch(api, {
const finalApi = (0, path_1.join)(api, selfEncryptInfo.id);
const res = await fetch(finalApi, {
method: 'post',
headers: {
'Content-Type': 'application/json',
[OAK_SYNC_HEADER_ITEM]: selfEncryptInfo.id,
[OAK_SYNC_HEADER_ENTITY]: remoteEntity,
[OAK_SYNC_HEADER_ENTITYID]: remoteEntityId,
},
body: JSON.stringify(opers),
});
if (res.status !== 200) {
throw new Error(`sync数据时访问api「${api}」的结果不是200。「${res.status}`);
throw new Error(`sync数据时访问api「${finalApi}」的结果不是200。「${res.status}`);
}
json = await res.json();
}
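Combined with pushOper further down, this hunk changes how a push request is addressed: the sender's own node id is appended to the URL, and the two new headers tell the receiver who it is expected to be. A conceptual sketch with example values (entity 'company', sender id 'A', receiver id 'B'); the real code assembles the segments with path.join:

    // Conceptual sketch of the push request built by pushOper + pushOnChannel.
    const url = 'https://peer.example.com';        // from getPushInfo(context, { userId, remoteEntityId })
    const endpoint = 'sync/company';               // endpoint2 = join(endpoint || 'sync', self.entity)
    const selfId = 'A';                            // selfEncryptInfo.id, appended by pushOnChannel
    const finalApi = `${url}/endpoint/${endpoint}/${selfId}`;
    const headers = {
        'Content-Type': 'application/json',
        'oak-sync-entity': 'company',              // remoteEntity: the receiver's node entity
        'oak-sync-entity-id': 'B',                 // remoteEntityId: the receiver's node id
    };
    // body: JSON array of opers. The receiving endpoint (getSelfEndpoint) reads entity/entityId
    // from the URL params, looks up the matching remotes entry, and asserts that the two headers
    // equal its own self.entity and selfEncryptInfo.id.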
@ -74,7 +77,7 @@ class Synchronizer {
const interval = Math.max(0, channel.nextPushTimestamp - Date.now());
const retry2 = needRetry ? (typeof retry === 'number' ? retry + 1 : 1) : undefined;
console.log('need retry', retry2);
setTimeout(() => this.pushOnChannel(channel, retry2), interval);
setTimeout(() => this.pushOnChannel(remoteEntity, remoteEntityId, context, channel, retry2), interval);
}
else {
channel.handler = undefined;
@ -90,7 +93,7 @@ class Synchronizer {
*
* Strictly speaking, we still cannot guarantee that an oper produced earlier is pushed (and arrives) earlier, because volatile triggers only fire after the transaction commits; that situation should not arise at the moment, so assert it when the oper is actually executed. by Xc 20240226
*/
async pushOper(oper, userId, url, endpoint, nextPushTimestamp) {
async pushOper(context, oper, userId, url, endpoint, remoteEntity, remoteEntityId, nextPushTimestamp) {
if (!this.remotePushChannel[userId]) {
this.remotePushChannel[userId] = {
api: (0, path_1.join)(url, 'endpoint', endpoint),
@ -125,7 +128,7 @@ class Synchronizer {
if (!channel.handler) {
channel.nextPushTimestamp = nextPushTimestamp2;
channel.handler = setTimeout(async () => {
await this.pushOnChannel(channel);
await this.pushOnChannel(remoteEntity, remoteEntityId, context, channel);
}, nextPushTimestamp2 - now);
}
else if (channel.nextPushTimestamp && channel.nextPushTimestamp > nextPushTimestamp2) {
@ -139,11 +142,11 @@ class Synchronizer {
console.warn('在sync数据时遇到了重复推送的oper', JSON.stringify(oper), userId, url);
}
}
async getSelfEncryptInfo() {
async getSelfEncryptInfo(context) {
if (this.selfEncryptInfo) {
return this.selfEncryptInfo;
}
this.selfEncryptInfo = await this.config.self.getSelfEncryptInfo();
this.selfEncryptInfo = await this.config.self.getSelfEncryptInfo(context);
return this.selfEncryptInfo;
}
makeCreateOperTrigger() {
@ -152,10 +155,10 @@ class Synchronizer {
// Build a map from each entity to the remote-node info it must be synced to, based on the remotes definitions
const pushAccessMap = {};
remotes.forEach((remote) => {
const { getRemotePushInfo, pushEntities: pushEntityDefs, endpoint, pathToUser, relationName: rnRemote, entitySelf } = remote;
const { getPushInfo, pushEntities: pushEntityDefs, endpoint, pathToUser, relationName: rnRemote } = remote;
if (pushEntityDefs) {
const pushEntities = [];
const endpoint2 = (0, path_1.join)(endpoint || 'sync', entitySelf || self.entitySelf);
const endpoint2 = (0, path_1.join)(endpoint || 'sync', self.entity);
for (const def of pushEntityDefs) {
const { path, relationName, recursive, entity, actions, onSynchronized } = def;
pushEntities.push(entity);
@ -168,15 +171,21 @@ class Synchronizer {
}, recursive) : (0, relationPath_1.destructDirectPath)(this.schema, entity, path2, recursive);
const groupByUsers = (rows) => {
const userRowDict = {};
rows.filter((row) => {
const userIds = getData(row)?.map(ele => ele.userId);
if (userIds) {
userIds.forEach((userId) => {
rows.forEach((row) => {
const goals = getData(row);
if (goals) {
goals.forEach(({ entity, entityId, userId }) => {
if (userRowDict[userId]) {
userRowDict[userId].push(row.id);
// Logically, the entity and entityId associated with a given userId must be the same across rows; this entity/entityId identifies the remote peer
(0, assert_1.default)(userRowDict[userId].entity === entity && userRowDict[userId].entityId === entityId);
userRowDict[userId].rowIds.push(row.id);
}
else {
userRowDict[userId] = [row.id];
userRowDict[userId] = {
entity,
entityId,
rowIds: [row.id],
};
}
});
}
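The rewritten groupByUsers above no longer returns a plain userId-to-rowIds map; each bucket also carries the peer's entity/entityId (asserted identical across that user's rows), which pushToUserIdFn destructures further down to address the push. An illustrative result, with invented ids:

    // Illustrative shape of the new groupByUsers result (ids are invented):
    // two rows whose getData() both yield [{ userId: 'u1', entity: 'company', entityId: 'B' }]
    const exampleResult: Record<string, { entity: string; entityId: string; rowIds: string[] }> = {
        u1: { entity: 'company', entityId: 'B', rowIds: ['r1', 'r2'] },
    };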
@ -187,7 +196,7 @@ class Synchronizer {
pushAccessMap[entity] = [{
projection,
groupByUsers,
getRemotePushInfo,
getRemotePushInfo: getPushInfo,
endpoint: endpoint2,
entity,
actions,
@ -198,7 +207,7 @@ class Synchronizer {
pushAccessMap[entity].push({
projection,
groupByUsers,
getRemotePushInfo,
getRemotePushInfo: getPushInfo,
endpoint: endpoint2,
entity,
actions,
@ -250,7 +259,7 @@ class Synchronizer {
if (pushEntityNodes && pushEntityNodes.length > 0) {
// Each pushEntityNode corresponds to one configured remote entity
await Promise.all(pushEntityNodes.map(async (node) => {
const { projection, groupByUsers, getRemotePushInfo: getRemoteAccessInfo, endpoint, entity, actions, onSynchronized } = node;
const { projection, groupByUsers, getRemotePushInfo: getRemoteAccessInfo, endpoint, actions, onSynchronized } = node;
if (!actions || actions.includes(action)) {
const pushed = [];
const rows = await context.select(targetEntity, {
@ -267,7 +276,7 @@ class Synchronizer {
// These userIds are the users the data must be pushed to; filter out the user who performed this operation (its original producer)
const userSendDict = groupByUsers(rows);
const pushToUserIdFn = async (userId) => {
const rowIds = userSendDict[userId];
const { entity, entityId, rowIds } = userSendDict[userId];
// The oper to push to the remote node
const oper2 = {
id: oper.id,
@ -281,8 +290,11 @@ class Synchronizer {
bornAt: oper.bornAt,
targetEntity,
};
const { url } = await getRemoteAccessInfo(userId);
await this.pushOper(oper2 /** unclear why TS rejects this */, userId, url, endpoint);
const { url } = await getRemoteAccessInfo(context, {
userId,
remoteEntityId: entityId,
});
await this.pushOper(context, oper2 /** unclear why TS rejects this */, userId, url, endpoint, entity, entityId);
};
for (const userId in userSendDict) {
if (userId !== operatorId) {
@ -319,17 +331,15 @@ class Synchronizer {
getSyncTriggers() {
return [this.makeCreateOperTrigger()];
}
async checkOperationConsistent(entity, ids, bornAt) {
}
getSelfEndpoint() {
return {
name: this.config.self.endpoint || 'sync',
method: 'post',
params: ['entity'],
params: ['entity', 'entityId'],
fn: async (context, params, headers, req, body) => {
// body carries the array of opers sent by the remote node
const { entity } = params;
const { [OAK_SYNC_HEADER_ITEM]: id } = headers;
const { entity, entityId } = params;
const { [OAK_SYNC_HEADER_ENTITY]: meEntity, [OAK_SYNC_HEADER_ENTITYID]: meEntityId } = headers;
console.log('接收到来自远端的sync数据', entity, JSON.stringify(body));
const successIds = [];
let failed;
@ -337,22 +347,31 @@ class Synchronizer {
if (!this.remotePullInfoMap[entity]) {
this.remotePullInfoMap[entity] = {};
}
if (!this.remotePullInfoMap[entity][id]) {
const { getRemotePullInfo, pullEntities } = this.config.remotes.find(ele => ele.entity === entity);
if (!this.remotePullInfoMap[entity][entityId]) {
const { getPullInfo, pullEntities } = this.config.remotes.find(ele => ele.entity === entity);
const pullEntityDict = {};
if (pullEntities) {
pullEntities.forEach((def) => pullEntityDict[def.entity] = def);
}
this.remotePullInfoMap[entity][id] = {
pullInfo: await getRemotePullInfo(id),
this.remotePullInfoMap[entity][entityId] = {
pullInfo: await getPullInfo(context, {
selfId: meEntityId,
remoteEntityId: entityId,
}),
pullEntityDict,
};
}
const { pullInfo, pullEntityDict } = this.remotePullInfoMap[entity][id];
const { userId, algorithm, publicKey } = pullInfo;
// todo: decryption
const { pullInfo, pullEntityDict } = this.remotePullInfoMap[entity][entityId];
const { userId, algorithm, publicKey, cxtInfo } = pullInfo;
(0, assert_1.default)(userId);
if (!this.pullMaxBornAtMap.hasOwnProperty(id)) {
context.setCurrentUserId(userId);
if (cxtInfo) {
await context.initialize(cxtInfo);
}
const selfEncryptInfo = await this.getSelfEncryptInfo(context);
(0, assert_1.default)(selfEncryptInfo.id === meEntityId && meEntity === this.config.self.entity);
// todo: decryption
if (!this.pullMaxBornAtMap.hasOwnProperty(entityId)) {
const [maxHisOper] = await context.select('oper', {
data: {
id: 1,
@ -372,10 +391,9 @@ class Synchronizer {
indexFrom: 0,
count: 1,
}, { dontCollect: true });
this.pullMaxBornAtMap[id] = maxHisOper?.bornAt || 0;
this.pullMaxBornAtMap[entityId] = maxHisOper?.bornAt || 0;
}
let maxBornAt = this.pullMaxBornAtMap[id];
context.setCurrentUserId(userId);
let maxBornAt = this.pullMaxBornAtMap[entityId];
const opers = body;
const outdatedOpers = opers.filter(ele => ele.bornAt <= maxBornAt);
const freshOpers = opers.filter(ele => ele.bornAt > maxBornAt);
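The two filters above split the incoming batch around the pullMaxBornAt watermark; how the outdated part is treated lies outside the hunks shown, but the gate itself behaves as in this small worked example with invented values:

    // Worked example of the bornAt gate (illustrative values only).
    const maxBornAtExample = 100;                                    // pullMaxBornAtMap[entityId]
    const bornAts = [80, 100, 120, 130];
    const outdated = bornAts.filter((b) => b <= maxBornAtExample);   // [80, 100]: already covered
    const fresh = bornAts.filter((b) => b > maxBornAtExample);       // [120, 130]: processed now
    // after processing, pullMaxBornAtMap[entityId] is advanced to 130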
@ -441,7 +459,7 @@ class Synchronizer {
}
})()
]);
this.pullMaxBornAtMap[id] = maxBornAt;
this.pullMaxBornAtMap[entityId] = maxBornAt;
return {
successIds,
failed,


@ -6,7 +6,7 @@ import { makeIntrinsicCTWs } from "oak-domain/lib/store/actionDef";
import { intersection, omit } from 'oak-domain/lib/utils/lodash';
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
import { generateNewIdAsync } from 'oak-domain/lib/utils/uuid';
import { AppLoader as GeneralAppLoader, Trigger, Checker, Aspect, CreateOpResult, Context, EntityDict, Watcher, BBWatcher, WBWatcher, OpRecord, Routine, FreeRoutine, Timer, FreeTimer, StorageSchema, OperationResult } from "oak-domain/lib/types";
import { AppLoader as GeneralAppLoader, Trigger, Checker, Aspect, CreateOpResult, SyncConfig, EntityDict, Watcher, BBWatcher, WBWatcher, OpRecord, Routine, FreeRoutine, Timer, FreeTimer, StorageSchema, OperationResult } from "oak-domain/lib/types";
import { DbStore } from "./DbStore";
import generalAspectDict, { clearPorts, registerPorts } from 'oak-common-aspect/lib/index';
import { MySQLConfiguration } from 'oak-db/lib/MySQL/types/Configuration';
@ -19,7 +19,6 @@ import { Server as SocketIoServer, Namespace } from 'socket.io';
import DataSubscriber from './cluster/DataSubscriber';
import { getClusterInfo } from './cluster/env';
import Synchronizer from './Synchronizer';
import { SyncConfig } from './types/Sync';
export class AppLoader<ED extends EntityDict & BaseEntityDict, Cxt extends BackendRuntimeContext<ED>> extends GeneralAppLoader<ED, Cxt> {
@ -27,7 +26,7 @@ export class AppLoader<ED extends EntityDict & BaseEntityDict, Cxt extends Backe
private aspectDict: Record<string, Aspect<ED, Cxt>>;
private externalDependencies: string[];
protected dataSubscriber?: DataSubscriber<ED, Cxt>;
protected synchronizer?: Synchronizer<ED, Cxt>;
protected synchronizers?: Synchronizer<ED, Cxt>[];
protected contextBuilder: (scene?: string) => (store: DbStore<ED, Cxt>) => Promise<Cxt>;
private requireSth(filePath: string): any {
@ -127,11 +126,11 @@ export class AppLoader<ED extends EntityDict & BaseEntityDict, Cxt extends Backe
const dbConfigFile = join(this.path, 'configuration', 'mysql.json');
const dbConfig = require(dbConfigFile);
const syncConfigFile = join(this.path, 'lib', 'configuration', 'sync.js');
const syncConfig = existsSync(syncConfigFile) && require(syncConfigFile).default;
const syncConfigs = existsSync(syncConfigFile) && require(syncConfigFile).default;
return {
dbConfig: dbConfig as MySQLConfiguration,
syncConfig: syncConfig as SyncConfig<ED, Cxt> | undefined,
syncConfigs: syncConfigs as SyncConfig<ED, Cxt>[] | undefined,
};
}
@ -202,80 +201,26 @@ export class AppLoader<ED extends EntityDict & BaseEntityDict, Cxt extends Backe
(checker) => this.dbStore.registerChecker(checker)
);
if (this.synchronizer) {
if (this.synchronizers) {
// Syncing data to remote nodes is done through commit triggers
const syncTriggers = this.synchronizer.getSyncTriggers();
syncTriggers.forEach(
(trigger) => this.registerTrigger(trigger)
);
for (const synchronizer of this.synchronizers) {
const syncTriggers = synchronizer.getSyncTriggers();
syncTriggers.forEach(
(trigger) => this.registerTrigger(trigger)
);
}
}
}
async mount(initialize?: true) {
const { path } = this;
if (!initialize) {
const { dbConfig, syncConfig } = this.getConfiguration();
const { syncConfigs } = this.getConfiguration();
if (syncConfig) {
const {
self, remotes
} = syncConfig;
const { getSelfEncryptInfo, ...restSelf } = self;
this.synchronizer = new Synchronizer({
self: {
// entity: self.entity,
getSelfEncryptInfo: async () => {
const context = await this.contextBuilder()(this.dbStore);
await context.begin();
try {
const result = await self.getSelfEncryptInfo(context);
await context.commit();
return result;
}
catch (err) {
await context.rollback();
throw err;
}
},
...restSelf
},
remotes: remotes.map(
(r) => {
const { getPushInfo, getPullInfo, ...rest } = r;
return {
getRemotePushInfo: async (id) => {
const context = await this.contextBuilder()(this.dbStore);
await context.begin();
try {
const result = await getPushInfo(id, context);
await context.commit();
return result;
}
catch (err) {
await context.rollback();
throw err;
}
},
getRemotePullInfo: async (userId) => {
const context = await this.contextBuilder()(this.dbStore);
await context.begin();
try {
const result = await getPullInfo(userId, context);
await context.commit();
return result;
}
catch (err) {
await context.rollback();
throw err;
}
},
...rest,
};
}
)
}, this.dbStore.getSchema());
if (syncConfigs) {
this.synchronizers = syncConfigs.map(
config => new Synchronizer(config, this.dbStore.getSchema())
);
}
this.initTriggers();
@ -407,9 +352,13 @@ export class AppLoader<ED extends EntityDict & BaseEntityDict, Cxt extends Backe
}
}
if (this.synchronizer) {
const syncEp = this.synchronizer.getSelfEndpoint();
transformEndpointItem(syncEp.name, syncEp);
if (this.synchronizers) {
this.synchronizers.forEach(
(synchronizer) => {
const syncEp = synchronizer.getSelfEndpoint();
transformEndpointItem(syncEp.name, syncEp);
}
);
}
return endPointRouters;
}


@ -1,4 +1,5 @@
import { EntityDict, StorageSchema, EndpointItem, RemotePullInfo, SelfEncryptInfo, RemotePushInfo, PushEntityDef, PullEntityDef } from 'oak-domain/lib/types';
import { EntityDict, StorageSchema, EndpointItem, RemotePullInfo, SelfEncryptInfo,
RemotePushInfo, PushEntityDef, PullEntityDef, SyncConfig } from 'oak-domain/lib/types';
import { VolatileTrigger } from 'oak-domain/lib/types/Trigger';
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
import { destructRelationPath, destructDirectPath } from 'oak-domain/lib/utils/relationPath';
@ -6,10 +7,10 @@ import { BackendRuntimeContext } from 'oak-frontend-base/lib/context/BackendRunt
import assert from 'assert';
import { join } from 'path';
import { difference } from 'oak-domain/lib/utils/lodash';
import { SyncConfigWrapper } from './types/Sync';
import { getRelevantIds } from 'oak-domain/lib/store/filter';
const OAK_SYNC_HEADER_ITEM = 'oak-sync-remote-id';
const OAK_SYNC_HEADER_ENTITY = 'oak-sync-entity';
const OAK_SYNC_HEADER_ENTITYID = 'oak-sync-entity-id';
type Channel<ED extends EntityDict & BaseEntityDict> = {
queue: Array<{
@ -23,7 +24,7 @@ type Channel<ED extends EntityDict & BaseEntityDict> = {
};
export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt extends BackendRuntimeContext<ED>> {
private config: SyncConfigWrapper<ED, Cxt>;
private config: SyncConfig<ED, Cxt>;
private schema: StorageSchema<ED>;
private selfEncryptInfo?: SelfEncryptInfo;
private remotePullInfoMap: Record<string, Record<string, {
@ -39,7 +40,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
* @param channel
* @param retry
*/
private async pushOnChannel(channel: Channel<ED>, retry?: number) {
private async pushOnChannel(remoteEntity: keyof ED, remoteEntityId: string, context: Cxt, channel: Channel<ED>, retry?: number) {
const { queue, api, nextPushTimestamp } = channel;
assert(nextPushTimestamp);
@ -59,19 +60,21 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
};
try {
// todo: encryption
const selfEncryptInfo = await this.getSelfEncryptInfo();
const selfEncryptInfo = await this.getSelfEncryptInfo(context);
console.log('向远端结点sync数据', api, JSON.stringify(opers));
const res = await fetch(api, {
const finalApi = join(api, selfEncryptInfo.id);
const res = await fetch(finalApi, {
method: 'post',
headers: {
'Content-Type': 'application/json',
[OAK_SYNC_HEADER_ITEM]: selfEncryptInfo!.id,
[OAK_SYNC_HEADER_ENTITY]: remoteEntity as string,
[OAK_SYNC_HEADER_ENTITYID]: remoteEntityId,
},
body: JSON.stringify(opers),
});
if (res.status !== 200) {
throw new Error(`sync数据时访问api「${api}」的结果不是200。「${res.status}`);
throw new Error(`sync数据时访问api「${finalApi}」的结果不是200。「${res.status}`);
}
json = await res.json();
}
@ -107,7 +110,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
const interval = Math.max(0, channel.nextPushTimestamp - Date.now());
const retry2 = needRetry ? (typeof retry === 'number' ? retry + 1 : 1) : undefined;
console.log('need retry', retry2);
setTimeout(() => this.pushOnChannel(channel, retry2), interval);
setTimeout(() => this.pushOnChannel(remoteEntity, remoteEntityId, context, channel, retry2), interval);
}
else {
channel.handler = undefined;
@ -125,10 +128,13 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
* Strictly speaking, we still cannot guarantee that an oper produced earlier is pushed (and arrives) earlier, because volatile triggers only fire after the transaction commits; that situation should not arise at the moment, so assert it when the oper is actually executed. by Xc 20240226
*/
private async pushOper(
context: Cxt,
oper: Partial<ED['oper']['Schema']>,
userId: string,
url: string,
endpoint: string,
remoteEntity: keyof ED,
remoteEntityId: string,
nextPushTimestamp?: number
) {
if (!this.remotePushChannel[userId]) {
@ -168,7 +174,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
if (!channel.handler) {
channel.nextPushTimestamp = nextPushTimestamp2;
channel.handler = setTimeout(async () => {
await this.pushOnChannel(channel);
await this.pushOnChannel(remoteEntity, remoteEntityId, context, channel);
}, nextPushTimestamp2 - now);
}
else if (channel.nextPushTimestamp && channel.nextPushTimestamp > nextPushTimestamp2) {
@ -184,11 +190,11 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
}
}
private async getSelfEncryptInfo() {
private async getSelfEncryptInfo(context: Cxt) {
if (this.selfEncryptInfo) {
return this.selfEncryptInfo;
}
this.selfEncryptInfo = await this.config.self.getSelfEncryptInfo();
this.selfEncryptInfo = await this.config.self.getSelfEncryptInfo(context);
return this.selfEncryptInfo!;
}
@ -198,20 +204,24 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
// Build a map from each entity to the remote-node info it must be synced to, based on the remotes definitions
const pushAccessMap: Record<string, Array<{
projection: ED[keyof ED]['Selection']['data']; // the projection needed to reach the related users from this entity
groupByUsers: (row: Partial<ED[keyof ED]['Schema']>[]) => Record<string, string[]>; // group row ids by the userId of the related data rows
getRemotePushInfo: (userId: string) => Promise<RemotePushInfo>; // get the corresponding remote push info by userId
endpoint: string; // URL of the remote receiving endpoint
projection: ED[keyof ED]['Selection']['data']; // the projection needed to reach the related users from this entity
groupByUsers: (row: Partial<ED[keyof ED]['Schema']>[]) => Record<string, {
entity: keyof ED; // the peer's target entity
entityId: string; // the peer's target entity id
rowIds: string[]; // the row ids to push
}>; // regroup row ids by the userId of the related data rows; keys are userIds
getRemotePushInfo: SyncConfig<ED, Cxt>['remotes'][number]['getPushInfo']; // get the corresponding remote push info by userId
endpoint: string; // URL of the remote receiving endpoint
actions?: string[];
onSynchronized: PushEntityDef<ED, keyof ED, Cxt>['onSynchronized'];
entity: keyof ED;
}>> = {};
remotes.forEach(
(remote) => {
const { getRemotePushInfo, pushEntities: pushEntityDefs, endpoint, pathToUser, relationName: rnRemote, entitySelf } = remote;
const { getPushInfo, pushEntities: pushEntityDefs, endpoint, pathToUser, relationName: rnRemote } = remote;
if (pushEntityDefs) {
const pushEntities = [] as Array<keyof ED>;
const endpoint2 = join(endpoint || 'sync', entitySelf as string || self.entitySelf as string);
const endpoint2 = join(endpoint || 'sync', self.entity as string);
for (const def of pushEntityDefs) {
const { path, relationName, recursive, entity, actions, onSynchronized } = def;
pushEntities.push(entity);
@ -228,19 +238,30 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
}, recursive) : destructDirectPath(this.schema, entity, path2, recursive);
const groupByUsers = (rows: Partial<ED[keyof ED]['Schema']>[]) => {
const userRowDict: Record<string, string[]> = {};
rows.filter(
const userRowDict: Record<string, {
rowIds: string[];
entityId: string;
entity: keyof ED;
}> = {};
rows.forEach(
(row) => {
const userIds = getData(row)?.map(ele => ele.userId);
if (userIds) {
userIds.forEach(
(userId) => {
const goals = getData(row);
if (goals) {
goals.forEach(
({ entity, entityId, userId }) => {
if (userRowDict[userId]) {
userRowDict[userId].push(row.id!);
// Logically, the entity and entityId associated with a given userId must be the same across rows; this entity/entityId identifies the remote peer
assert(userRowDict[userId].entity === entity && userRowDict[userId].entityId === entityId);
userRowDict[userId].rowIds.push(row.id!);
}
else {
userRowDict[userId] = [row.id!];
userRowDict[userId] = {
entity,
entityId,
rowIds: [row.id!],
};
}
}
)
}
@ -253,7 +274,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
pushAccessMap[entity as string] = [{
projection,
groupByUsers,
getRemotePushInfo,
getRemotePushInfo: getPushInfo,
endpoint: endpoint2,
entity,
actions,
@ -264,7 +285,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
pushAccessMap[entity as string].push({
projection,
groupByUsers,
getRemotePushInfo,
getRemotePushInfo: getPushInfo,
endpoint: endpoint2,
entity,
actions,
@ -324,7 +345,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
await Promise.all(
pushEntityNodes.map(
async (node) => {
const { projection, groupByUsers, getRemotePushInfo: getRemoteAccessInfo, endpoint, entity, actions, onSynchronized } = node;
const { projection, groupByUsers, getRemotePushInfo: getRemoteAccessInfo, endpoint, actions, onSynchronized } = node;
if (!actions || actions.includes(action!)) {
const pushed = [] as Promise<void>[];
const rows = await context.select(targetEntity!, {
@ -342,7 +363,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
// These userIds are the users the data must be pushed to; filter out the user who performed this operation (its original producer)
const userSendDict = groupByUsers(rows);
const pushToUserIdFn = async (userId: string) => {
const rowIds = userSendDict[userId];
const { entity, entityId, rowIds } = userSendDict[userId];
// The oper to push to the remote node
const oper2 = {
id: oper.id!,
@ -356,8 +377,11 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
bornAt: oper.bornAt!,
targetEntity,
};
const { url } = await getRemoteAccessInfo(userId);
await this.pushOper(oper2 as any /** unclear why TS rejects this */, userId, url, endpoint);
const { url } = await getRemoteAccessInfo(context, {
userId,
remoteEntityId: entityId,
});
await this.pushOper(context, oper2 as any /** unclear why TS rejects this */, userId, url, endpoint, entity, entityId);
};
for (const userId in userSendDict) {
if (userId !== operatorId) {
@ -389,7 +413,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
return createOperTrigger;
}
constructor(config: SyncConfigWrapper<ED, Cxt>, schema: StorageSchema<ED>) {
constructor(config: SyncConfig<ED, Cxt>, schema: StorageSchema<ED>) {
this.config = config;
this.schema = schema;
}
@ -402,15 +426,11 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
return [this.makeCreateOperTrigger()] as Array<VolatileTrigger<ED, keyof ED, Cxt>>;
}
private async checkOperationConsistent(entity: keyof ED, ids: string[], bornAt: number) {
}
getSelfEndpoint(): EndpointItem<ED, Cxt> {
return {
name: this.config.self.endpoint || 'sync',
method: 'post',
params: ['entity'],
params: ['entity', 'entityId'],
fn: async (context, params, headers, req, body): Promise<{
successIds: string[],
failed?: {
@ -419,8 +439,8 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
};
}> => {
// body carries the array of opers sent by the remote node
const { entity } = params;
const { [OAK_SYNC_HEADER_ITEM]: id } = headers;
const { entity, entityId } = params;
const { [OAK_SYNC_HEADER_ENTITY]: meEntity, [OAK_SYNC_HEADER_ENTITYID]: meEntityId } = headers;
console.log('接收到来自远端的sync数据', entity, JSON.stringify(body));
const successIds = [] as string[];
@ -432,26 +452,35 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
if (!this.remotePullInfoMap[entity]) {
this.remotePullInfoMap[entity] = {};
}
if (!this.remotePullInfoMap[entity]![id as string]) {
const { getRemotePullInfo, pullEntities } = this.config.remotes.find(ele => ele.entity === entity)!;
if (!this.remotePullInfoMap[entity]![entityId]) {
const { getPullInfo, pullEntities } = this.config.remotes.find(ele => ele.entity === entity)!;
const pullEntityDict = {} as Record<string, PullEntityDef<ED, keyof ED, Cxt>>;
if (pullEntities) {
pullEntities.forEach(
(def) => pullEntityDict[def.entity as string] = def
);
}
this.remotePullInfoMap[entity]![id as string] = {
pullInfo: await getRemotePullInfo(id as string),
this.remotePullInfoMap[entity]![entityId] = {
pullInfo: await getPullInfo(context, {
selfId: meEntityId as string,
remoteEntityId: entityId,
}),
pullEntityDict,
};
}
const { pullInfo, pullEntityDict } = this.remotePullInfoMap[entity][id as string]!;
const { userId, algorithm, publicKey } = pullInfo;
const { pullInfo, pullEntityDict } = this.remotePullInfoMap[entity][entityId]!;
const { userId, algorithm, publicKey, cxtInfo } = pullInfo;
assert(userId);
context.setCurrentUserId(userId);
if (cxtInfo) {
await context.initialize(cxtInfo);
}
const selfEncryptInfo = await this.getSelfEncryptInfo(context);
assert(selfEncryptInfo.id === meEntityId && meEntity === this.config.self.entity);
// todo: decryption
assert(userId);
if (!this.pullMaxBornAtMap.hasOwnProperty(id as string)) {
if (!this.pullMaxBornAtMap.hasOwnProperty(entityId)) {
const [maxHisOper] = await context.select('oper', {
data: {
id: 1,
@ -471,11 +500,10 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
indexFrom: 0,
count: 1,
}, { dontCollect: true });
this.pullMaxBornAtMap[id as string] = maxHisOper?.bornAt as number || 0;
this.pullMaxBornAtMap[entityId] = maxHisOper?.bornAt as number || 0;
}
let maxBornAt = this.pullMaxBornAtMap[id as string]!;
context.setCurrentUserId(userId);
let maxBornAt = this.pullMaxBornAtMap[entityId]!;
const opers = body as ED['oper']['Schema'][];
const outdatedOpers = opers.filter(
@ -553,7 +581,7 @@ export default class Synchronizer<ED extends EntityDict & BaseEntityDict, Cxt ex
]
);
this.pullMaxBornAtMap[id as string] = maxBornAt;
this.pullMaxBornAtMap[entityId] = maxBornAt;
return {
successIds,
failed,


@ -1,25 +0,0 @@
import { EntityDict } from 'oak-domain/lib/types';
import { EntityDict as BaseEntityDict } from 'oak-domain/lib/base-app-domain';
import { BackendRuntimeContext } from 'oak-frontend-base/lib/context/BackendRuntimeContext';
import { RemotePushInfo, RemotePullInfo, SelfEncryptInfo, SyncRemoteConfigBase, SyncSelfConfigBase, SyncConfig } from 'oak-domain/lib/types/Sync';
interface SyncRemoteConfigWrapper<ED extends EntityDict & BaseEntityDict, Cxt extends BackendRuntimeContext<ED>> extends SyncRemoteConfigBase<ED, Cxt> {
getRemotePushInfo: (userId: string) => Promise<RemotePushInfo>;
getRemotePullInfo: (id: string) => Promise<RemotePullInfo>;
};
interface SyncSelfConfigWrapper<ED extends EntityDict & BaseEntityDict> extends SyncSelfConfigBase<ED> {
getSelfEncryptInfo: () => Promise<SelfEncryptInfo>;
};
export interface SyncConfigWrapper<ED extends EntityDict & BaseEntityDict, Cxt extends BackendRuntimeContext<ED>> {
self: SyncSelfConfigWrapper<ED>;
remotes: Array<SyncRemoteConfigWrapper<ED, Cxt>>;
};
export {
RemotePushInfo,
RemotePullInfo,
SelfEncryptInfo,
SyncConfig,
};