// forked from apxtri/apxtri
|
const glob = require("glob");
|
||
|
const path = require("path");
|
||
|
const fs = require("fs-extra");
|
||
|
const dayjs = require("dayjs");
|
||
|
const axios = require("axios");
|
||
|
const conf = require(`../../conf/townconf.json`);
|
||
|
const Checkjson = require(`./Checkjson.js`);
|
||
|
const { promiseHooks } = require("v8");
|
||
|
|
||
|
/**
 * This manages Objects for indexing, searching, checking and CRUD actions.
 * @objectPathName = objectpath/objectname
 * objectpath/objectname/conf.json
 * /idx/all_key1.json = {key1value:{object}}
 * lst_key1.json = [key1valA,key1val2,...]
 * key2_key1.json = {key2value:[key1value]}
 * all_key1_filtername = {key1value:{object}}
 * /itm/apxidvalue.json
 * in conf.json:
 * {
 *   "name": "object name ex:'nations'",
 *   "schema": "relative schema path from dirapi dirtown ex:'adminapi/schema/nations.json'",
 *   "lastupdateschema": 0, // timestamp of the last schema update
 *   "lastupdatedata": 0    // timestamp of the last itm update
 * }
 *
 * Specific keys in schema for apxtri:
 * apxid : the field value used to store an item
 * apxuniquekey : list of fields that have to be unique; you cannot have 2 itm with the same key value
 * apxidx : list of index files in /idx/
 *   { "name":"lst_fieldA", "keyval": "alias" } => lst_fieldA.json = [fieldAvalue1,...]
 *   { "name":"all_fieldA", "keyval": "fieldA" } => all_fieldA.json =
 *     if fieldA in apxuniquekey     => {fieldAvalue1:{object}}
 *     if fieldA not in apxuniquekey => {fieldAvalue1:[{object}]}
 *   { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB"] } => word_fieldA.json
 *     if fieldA in apxuniquekey     => {fieldAvalue1:fieldBvalue,}
 *     if fieldA not in apxuniquekey => {fieldAvalue1:[fieldBv1,fieldBv2,]}
 *   { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB","fieldC"] } => word_fieldA.json
 *     if fieldA in apxuniquekey     => {fieldAvalue1:{fieldB:val,fieldC:val},}
 *     if fieldA not in apxuniquekey => {fieldAvalue1:[{fieldB:val,fieldC:val},]}
 * apxaccessrights : list of profils with CRUD access rights
 **/
|
||
|
|
||
|
// Container for all Odmdb (file-based object database) methods; exported at end of file.
const Odmdb = {};
|
||
|
|
||
|
/**
 * @api syncObject
 * @param {string} url to an existing object conf (/objectname/conf.json)
 * @param {timestamp} timestamp
 *   0  => rebuild local object from all_{idapx}.json
 *   >0 => update itm and idx search by datetime
 */
// TODO(review): empty stub — synchronization is not implemented yet.
Odmdb.syncObject = () => {};
|
||
|
|
||
|
/**
 * @api createObject: create a space to host an object.
 *
 * @param {string} source "new" or a url (only used by caller conventions; not read here)
 * @param {string} schemaPath path where the schema is created/replaced (${schemaPath}/schema/)
 * @param {string} objectPath path where object data are stored
 * @param {string} objectName name of the object
 * @param {object} schema the json schema for this object (ignored when empty)
 * @param {object} lgjson the json file for a specific language (ignored when empty)
 * @param {string} lg the 2 letters language
 * @returns {object} {status:200} on success, {status:404,...} when a path does not exist
 *
 * Setup a new object locally:
 *  - from scratch => Create
 *  - from a synchronization => download from source all_{apxid}.json
 */
Odmdb.createObject = (
  source,
  schemaPath,
  objectPath,
  objectName,
  schema,
  lgjson,
  lg
) => {
  if (!fs.existsSync(schemaPath)) {
    return {
      status: 404,
      ref: "Odmdb",
      info: "pathnamedoesnotexist",
      moreinfo: { fullpath: schemaPath },
    };
  }
  if (!fs.existsSync(objectPath)) {
    return {
      status: 404,
      ref: "Odmdb",
      info: "pathnamedoesnotexist",
      moreinfo: { fullpath: objectPath },
    };
  }
  // true when o is a non-empty value (and not an empty plain object {})
  const notEmptyObject = (o) =>
    o && !(Object.keys(o).length === 0 && o.constructor === Object);
  // store schema file if not empty, undefined or {}
  if (notEmptyObject(schema)) {
    fs.outputJSONSync(`${schemaPath}/schema/${objectName}.json`, schema, {
      spaces: 2,
    });
  }
  if (lg && notEmptyObject(lgjson)) {
    fs.outputJSONSync(`${schemaPath}/lg/${objectName}_${lg}.json`, lgjson, {
      spaces: 2,
    });
  }
  // create environnement object with the new schema config
  if (!fs.existsSync(`${objectPath}/${objectName}`)) {
    // bugfix: was `${objectPath}/${objectName}/idx/confjson` (missing dot, wrong folder);
    // Odmdb.Schema reads `${objectPathname}/conf.json`, so write it there
    fs.outputJsonSync(
      `${objectPath}/${objectName}/conf.json`,
      { schema: `${schemaPath}/schema/${objectName}.json` },
      { spaces: 2 }
    );
  }
  return { status: 200 };
};
|
||
|
|
||
|
/**
 * Update an object definition.
 * @param {string} objectPathname
 * @param {object} meta update request
 *   lg:
 *   lgobj: object to store in /lg/objectname_lg.json
 *   schema: an update schema
 * @return {status, ref:"Odmdb", msg:"", data}
 *
 * Intended behavior: create a tmp object env and check the existing object to
 * identify any issues; if all is fine then apply the change by replacing.
 */
// TODO(review): empty stub — no implementation yet.
Odmdb.updateObject = (objectPathname, meta) => {};
|
||
|
|
||
|
/**
 * Get a schema (and its conf) from objectPathname.
 *
 * todo only local schema => plan a sync each 10 minutes
 * @param {string} objectPathname folder containing conf.json (conf.schema points to the schema file,
 *        relative path like adminapi/schema/objectName.json or tribename/schema/objectName)
 * @param {boolean} validschema when truthy, run Checkjson.schema.validation on the resolved schema
 * @returns {object} {status:200, ref:"Odmdb", msg:"getschema", data:{conf, schema}}
 *          or {status:404|406, ...} on missing schema / missing $ref / missing apxid / bad apxidx
 */
Odmdb.Schema = (objectPathname, validschema) => {
  // Resolve a schema path relative to ../nationchains/tribes and read it.
  // Returns {} when the file does not exist or the path is remote (http fetch not implemented).
  const getpath = (schemaPath) => {
    if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
    if (schemaPath.substring(0, 4) == "http") {
      // @todo fetch the remote schema with axios (needs an async refactor)
      return {};
    }
    schemaPath = `../nationchains/tribes/${schemaPath}`;
    return fs.existsSync(schemaPath) ? fs.readJsonSync(schemaPath) : {};
  };
  const confschema = fs.readJsonSync(`${objectPathname}/conf.json`);
  const res = {
    status: 200,
    ref: "Odmdb",
    msg: "getschema",
    data: { conf: confschema },
  };
  res.data.schema = getpath(confschema.schema);
  if (Object.keys(res.data.schema).length == 0) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "schemanotfound",
      data: { objectPathname, schema: {} },
    };
  }
  // Replace 1st-level properties of type object carrying a $ref by the referenced
  // schema content (ref must be adminapi/ or tribeid/).
  // @todo only 1 level of $ref; a recursive call is needed for deeper nesting.
  for (const p of Object.keys(res.data.schema.properties)) {
    if (
      res.data.schema.properties[p].type == "object" &&
      res.data.schema.properties[p]["$ref"]
    ) {
      const subschema = getpath(res.data.schema.properties[p]["$ref"]);
      // bugfix: test the loaded subschema (was testing res.data.schema, which is
      // never empty here, so "missingref" could never be reported)
      if (Object.keys(subschema).length == 0) {
        res.status = 404;
        res.msg = "missingref";
        res.data.missingref = res.data.schema.properties[p]["$ref"];
        // bugfix: for..of allows a real early return (a return inside forEach
        // only left the callback and the error was silently dropped)
        return res;
      }
      subschema.description += ` from external schema: ${res.data.schema.properties[p]["$ref"]}`;
      res.data.schema.properties[p] = subschema;
    }
  }
  if (!res.data.schema.apxid) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "missingprimarykey",
      data: {},
    };
  }
  if (res.data.schema.apxidx) {
    // guard: apxuniquekey may be absent from the schema file
    if (!res.data.schema.apxuniquekey) res.data.schema.apxuniquekey = [];
    // apxid must always be part of apxuniquekey
    if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
      res.data.schema.apxuniquekey.push(res.data.schema.apxid);
    }
    for (const idx of res.data.schema.apxidx) {
      if (
        idx.objkey &&
        !res.data.schema.apxuniquekey.includes(idx.keyval) &&
        !idx.objkey.includes(res.data.schema.apxid)
      ) {
        // bugfix: was returned from inside a forEach callback, never reaching the caller
        return {
          status: 406,
          ref: "Odmdb",
          msg: "unconsistencyapxidx",
          data: {
            name: idx.name,
            keyval: idx.keyval,
            objkey: idx.objkey,
            apxid: res.data.schema.apxid,
          },
        };
      }
    }
  }
  // bugfix: was `validschema || 1 == 1` (debug leftover, always true); honor the parameter
  if (validschema) {
    // returns {status:200, ref, msg} or {status!:200, multimsg:[{ref,msg,data}]}
    const check = Checkjson.schema.validation(res.data.schema);
    if (check.status != 200) {
      res.multimsg = check.multimsg;
      res.status = check.status;
    }
  }
  return res;
};
|
||
|
/**
 * Search items of an object (search itself not implemented yet).
 * @param {string} objectPath path where objects are stored
 * @param {string} objectName name of the object
 * @param {object} search = {
 *   txt: string,
 *   algo: match | pattern | fuzzy
 *   fieldstring: [list of field],
 *   indexfilter: {index1:[val1,val2 | ]}
 * }
 * Return data:[uuids]
 *
 * example: search exact match hill in townId
 *   heavy search={txt:"hill",algo:"match",fieldstring:"townId"}
 *   light search={txt:"hill", algo:"match", indexfilter:{"key":"townId","value":[]}}
 *   light search={txt:"hill", algo:"match", indexfilter:{"key":"nationId","value":"ants"}}
 */
Odmdb.search = (objectPath, objectName, search) => {
  // bugfix: was Odmdb.schema(objectPath, objectName) — that method does not exist;
  // the method is Odmdb.Schema and takes a single objectPathname
  const getschema = Odmdb.Schema(`${objectPath}/${objectName}`, true);
  if (getschema.status != 200) return getschema;
  // @todo implement match | pattern | fuzzy search over idx files
};
|
||
|
|
||
|
/**
 * Read one item by primary key, filtered by the caller's access rights.
 * @param {string} objectPathname folder of the object
 * @param {string} apxid primary key value of the requested item
 * @param {object} role {xalias, xprofils} from authenticated headers; "owner" profil
 *        is added when itm.owner equals role.xalias (note: mutates role.xprofils)
 * @returns {object} {status:200, ref:"Odmdb", msg:"found", data:{readable properties}}
 *          404 when the item file does not exist, 403 when the R right is missing
 */
Odmdb.r = (objectPathname, apxid, role) => {
  const itmf = `${objectPathname}/itm/${apxid}.json`;
  if (!fs.existsSync(itmf)) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "persondoesnotexist",
      data: { person: apxid },
    };
  }
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;
  const itm = fs.readJsonSync(itmf);
  if (itm.owner && itm.owner == role.xalias) {
    role.xprofils.push("owner");
  }
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (!accessright.R) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "forbidden",
      data: { person: apxid },
    };
  }
  // bugfix: an empty R list means "all properties readable" (see Odmdb.accessright
  // contract); previously an empty list returned an empty data object
  const readable =
    accessright.R.length > 0 ? accessright.R : Object.keys(itm);
  const data = {};
  readable.forEach((p) => {
    data[p] = itm[p];
  });
  return { status: 200, ref: "Odmdb", msg: "found", data };
};
|
||
|
|
||
|
/**
 * ASUP (deprecated — doit être géré au niveau des views des index):
 * get an array of items per primary key with a list of fields.
 *
 * @param {string} objectPathname where objects are stored (/object/conf.json points to the schema)
 * @param {array} apxidlist list of primary key values requested
 * @param {object} role {xalias, xprofils} to compute access rights (comes from headers;
 *        note: role.xprofils is mutated while iterating items)
 * @param {array} propertiesfilter (optional) keys to return for each object (all if undefined)
 * @returns {object} {status:200, data:{id: {filtered itm} | "forbiden" | "notfound"}}
 */
Odmdb.ASUPreads = (objectPathname, apxidlist, role, propertiesfilter) => {
  const res = { status: 200, data: {} };
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;
  // Pre-check: with "owner" granted, is the object readable at all?
  role.xprofils.push("owner");
  // bugfix: was `const accessright = (Odmdb.accessright = (schema.apxaccessright, role.xprofils));`
  // which (comma operator) OVERWROTE the Odmdb.accessright function with role.xprofils
  // instead of calling it; also the schema key is apxaccessrights (with final s)
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (!accessright.R) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "accessforbidden",
      data: { crud: "R", accessright },
    };
  }
  apxidlist.forEach((id) => {
    if (fs.existsSync(`${objectPathname}/itm/${id}.json`)) {
      const objectdata = fs.readJsonSync(`${objectPathname}/itm/${id}.json`);
      // grant or revoke the "owner" profil per item
      if (objectdata.owner && objectdata.owner == role.xalias) {
        if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
      } else {
        if (role.xprofils.includes("owner"))
          role.xprofils = role.xprofils.filter((e) => e !== "owner");
      }
      // bugfix: schema key is apxaccessrights (was apxaccessright, always undefined)
      const accessright = Odmdb.accessright(
        getschema.data.schema.apxaccessrights,
        role
      );
      if (!accessright.R) {
        res.data[id] = "forbiden";
      } else {
        let newpropertiesfilter = Object.keys(objectdata);
        if (accessright.R.length > 0) {
          // an explicit R list restricts the readable properties
          const setaccess = new Set(accessright.R);
          if (!propertiesfilter) propertiesfilter = Object.keys(objectdata);
          newpropertiesfilter = propertiesfilter.filter((f) =>
            setaccess.has(f)
          );
        }
        const objinfo = {};
        newpropertiesfilter.forEach((k) => {
          if (objectdata[k]) objinfo[k] = objectdata[k];
        });
        res.data[id] = objinfo;
      }
    } else {
      res.data[id] = "notfound";
    }
  });
  return res;
};
|
||
|
/**
 * Convert a list of profils into a merged accessright object.
 * @param {*} apxaccessrights from the schema: {profilname: {C|R|U|D: [properties]}}
 * @param {*} role {xprofils, xalias} available after isAuthenticated
 * @returns right to C reate if the key is present, to R ead (property list, or all
 *          if empty), to U pdate (property list, or all if empty), to D elete.
 *          example: {"C":[], "R":[properties list], "U":[properties list], "D":[]}
 */
Odmdb.accessright = (apxaccessrights, role) => {
  const accessright = {};
  for (const profil of role.xprofils) {
    const rights = apxaccessrights[profil];
    if (!rights) continue;
    // merge each CRUD action; property lists are unioned across profils
    for (const [act, props] of Object.entries(rights)) {
      accessright[act] = accessright[act]
        ? [...new Set([...accessright[act], ...props])]
        : props;
    }
  }
  return accessright;
};
|
||
|
/**
 * CUD a data itm into objectPathname if checkJson is valid, then update idx.
 * idx are up to date for unique properties but not for lists (see Odmdb.runidx).
 *
 * @param {string} objectPathname folder name where objects are stored
 * @param {string} crud C reate, U pdate or D elete
 * @param {object} itm an object respecting the checkJson schema in objectPathname/conf.json
 * @param {object} role {xprofils, xalias}; xprofils is a list of profils (anonymous,
 *        pagans, ...); "owner" is deduced when the item's owner property equals role.xalias
 *        (NOTE(review): role.xprofils is mutated for the caller — confirm intended)
 * @param {boolean} runindex rebuild indexes after the write (default true)
 * @returns {object} {status:200, msg:"cudsuccessfull"} or a 406/403 error object
 */
Odmdb.cud = (objectPathname, crud, itm, role, runindex = true) => {
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;

  // the primary key (apxid) is mandatory to locate the itm file
  if (!itm[getschema.data.schema.apxid]) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "apxidmissing",
      data: { missingkey: getschema.data.schema.apxid },
    };
  }
  fs.ensureDirSync(`${objectPathname}/itm/`);
  fs.ensureDirSync(`${objectPathname}/idx/`);
  const existid = fs.existsSync(
    `${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
  );
  if (existid && crud == "C") {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "alreadyexist",
      data: {
        objectname: path.basename(objectPathname),
        key: getschema.data.schema.apxid,
        val: itm[getschema.data.schema.apxid],
      },
    };
  }
  if (!existid && ["U", "D"].includes(crud)) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "doesnotexist",
      data: {
        objectname: path.basename(objectPathname),
        key: getschema.data.schema.apxid,
        val: itm[getschema.data.schema.apxid],
      },
    };
  }
  const itmold = existid
    ? fs.readJSONSync(
        `${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
      )
    : {};
  if (existid && itmold.owner && itmold.owner == role.xalias) {
    role.xprofils.push("owner");
  }
  if (!existid && crud == "C" && !itm.owner) {
    // set owner cause this is a Create
    itm.owner = role.xalias;
    role.xprofils.push("owner");
  }
  // get accessright {C:[],R:[],U:[],D:[]}; a present key means the action is
  // authorized; for R and U a non-empty array limits the right to those properties
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (
    (crud == "C" && !accessright.C) ||
    (crud == "D" && !accessright.D) ||
    (crud == "U" && !accessright.U)
  ) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "accessforbidden",
      data: { crud, accessright },
    };
  }
  // delete or save
  if (crud == "D") {
    // keep a copy of the deleted itm in /delitm/ stamped with its deletion date
    itmold["dt_delete"] = dayjs().toISOString();
    fs.outputJSONSync(
      `${objectPathname}/delitm/${itmold[getschema.data.schema.apxid]}.json`,
      itmold
    );
    fs.rmSync(
      `${objectPathname}/itm/${itmold[getschema.data.schema.apxid]}.json`
    );
  } else {
    // if Create or Update erase old version
    let itmtostore = itm;
    if (crud == "U" && accessright.U.length > 0) {
      // restricted update: only the properties listed in accessright.U are replaced
      itmtostore = itmold;
      accessright.U.forEach((p) => {
        itmtostore[p] = itm[p];
      });
      itmtostore.dt_update = dayjs().toISOString();
    }
    if (crud == "C") itmtostore.dt_create = dayjs().toISOString();
    // check consistency of the data to store against the schema
    const chkdata = Checkjson.schema.data(
      getschema.data.schema,
      itmtostore,
      false
    );
    if (chkdata.status != 200) return chkdata;
    if (!getschema.data.schema.apxuniquekey)
      getschema.data.schema.apxuniquekey = [];
    fs.outputJSONSync(
      `${objectPathname}/itm/${chkdata.data.apxid}.json`,
      chkdata.data.itm
    );
  }
  // rebuild index if requested
  if (runindex) Odmdb.runidx(objectPathname, getschema.data.schema);
  getschema.data.conf.lastupdatedata = dayjs().toISOString();
  fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
  return {
    status: 200,
    ref: "Odmdb",
    msg: "cudsuccessfull",
    data: {},
  };
};
|
||
|
/**
 * Rebuild all idx files of an object from its itm files.
 *
 * apxidx: [list of index descriptors
 *  { name: string, // filename written to /idx/name.json
 *    type: array | view | distribution
 *      array: list of unique values found in keyval, eventually filtered
 *      view: only for unique keyval; {apxidvalue:{itm filtered by objkey}}
 *      distribution: {keyval value:[apxid itm values]}
 *    keyval: string, // a property of type string or array; a dotted path "a.b"
 *                    // addresses a sub-object (distribution only)
 *    objkey: [] array of properties; empty means all, 1 element => store the value only
 *    filter: an eval-ed expression on the item (itm.key) returning true (keep) or false (skip)
 *  }]
 * @param {string} objectPathname folder of the object
 * @param {object} schema optional; loaded via Odmdb.Schema when missing or without apxid
 */
Odmdb.runidx = (objectPathname, schema) => {
  if (!schema || !schema.apxid) {
    const getschema = Odmdb.Schema(objectPathname, true);
    if (getschema.status != 200) return getschema;
    schema = getschema.data.schema;
  }
  // ventil holds, per index name, its settings plus the accumulated data
  const ventil = {};
  schema.apxidx.forEach((id) => {
    ventil[id.name] = id;
    if (id.keyval.includes(".")) {
      // keyval is a dotted path into a sub-object; the value can only be a string
      ventil[id.name].isobject = true;
      ventil[id.name].isunique = false;
      ventil[id.name].nbobjkey = 0;
      ventil[id.name].keyvaltype = "string";
      ventil[id.name].filter = id.filter ? id.filter.replace(/;/g, "") : ""; // check integrity of string
      ventil[id.name].data = {};
    } else {
      ventil[id.name].isunique = schema.apxuniquekey.includes(id.keyval);
      ventil[id.name].nbobjkey = id.objkey ? id.objkey.length : 0;
      ventil[id.name].keyvaltype = schema.properties[id.keyval].type;
      ventil[id.name].filter = id.filter ? id.filter.replace(/;/g, "") : ""; // check integrity of string
      ventil[id.name].data = ventil[id.name].type == "array" ? [] : {};
    }
  });
  glob.sync(`${objectPathname}/itm/*.json`).forEach((i) => {
    const itm = fs.readJSONSync(i); // `itm` is referenced by the eval-ed filters below
    Object.keys(ventil).forEach((n) => {
      let keep = true;
      if (ventil[n].filter != "") {
        try {
          // SECURITY: the filter string comes from the schema and is eval-ed;
          // schema files must be trusted content
          keep = eval(ventil[n].filter);
        } catch (err) {
          keep = false;
        }
      }
      if (keep && ventil[n].type == "array" && itm[ventil[n].keyval]) {
        if (ventil[n].keyvaltype == "array") {
          itm[ventil[n].keyval].forEach((v) => {
            if (!ventil[n].data.includes(v)) ventil[n].data.push(v);
          });
        } else {
          if (!ventil[n].data.includes(itm[ventil[n].keyval]))
            ventil[n].data.push(itm[ventil[n].keyval]);
        }
      }
      if (
        keep &&
        ventil[n].type == "view" &&
        ventil[n].isunique &&
        itm[ventil[n].keyval]
      ) {
        if (ventil[n].nbobjkey == 0)
          ventil[n].data[itm[ventil[n].keyval]] = itm;
        if (ventil[n].nbobjkey == 1)
          ventil[n].data[itm[ventil[n].keyval]] = itm[ventil[n].objkey[0]];
        if (ventil[n].nbobjkey > 1) {
          const objdata = {};
          Object.keys(itm).forEach((k) => {
            if (ventil[n].objkey.includes(k)) objdata[k] = itm[k];
          });
          ventil[n].data[itm[ventil[n].keyval]] = objdata;
        }
      }
      if (keep && ventil[n].type == "distribution" && itm[ventil[n].keyval]) {
        // normalize to an array of values (keyval may be a string or an array)
        const listval =
          ventil[n].keyvaltype == "string"
            ? [itm[ventil[n].keyval]]
            : itm[ventil[n].keyval];
        listval.forEach((val) => {
          if (!ventil[n].data[val]) ventil[n].data[val] = [];
          ventil[n].data[val].push(itm[schema.apxid]);
        });
      }
      if (
        keep &&
        ventil[n].type == "distribution" &&
        ventil[n].isobject &&
        itm[ventil[n].keyval.split(".")[0]]
      ) {
        let itmval = JSON.parse(JSON.stringify(itm));
        // walk the dotted path; bugfix: guard against null so a missing
        // intermediate level no longer throws (was `itmval[k]` on null)
        ventil[n].keyval.split(".").forEach((k) => {
          itmval = itmval && itmval[k] ? itmval[k] : null;
        });
        if (itmval) {
          if (!ventil[n].data[itmval]) ventil[n].data[itmval] = [];
          ventil[n].data[itmval].push(itm[schema.apxid]);
        }
      }
    });
  });
  Object.keys(ventil).forEach((n) => {
    // bugfix: fs.outputJSON is async and its promise was left floating;
    // use the sync variant so indexes are on disk when runidx returns
    fs.outputJSONSync(
      `${objectPathname}/idx/${ventil[n].name}.json`,
      ventil[n].data
    );
  });
};
|
||
|
|
||
|
/**
 * ASUP (deprecated — see Odmdb.runidx): create/update idx from itm(s).
 *
 * @param {string} objectPathname
 * @param {string} crud C add, U update, D delete, I reindex
 * @param {object} itm item to Create or Update, or {} if crud == I or D
 * @param {object} itmold previous item if crud == U or D, or {} if crud == I or C
 * @param {array} idxs specific list of idx to rebuild; [] means use the schema's apxidx
 * @param {object} schema if empty, it is loaded via Odmdb.Schema().data.schema
 *
 * examples:
 *  create alias 12 name fred:
 *    Odmdb.ASUPidxfromitm('.../tribes/ndda/persons',"C",{alias:'12',name:"fred"},{},[], {person schema})
 *  update alias 12 name freddy:
 *    Odmdb.ASUPidxfromitm('.../tribes/ndda/persons',"U",{alias:'12',name:"freddy"},{alias:'12',name:"fred"},[], {person schema})
 *  delete alias 12:
 *    Odmdb.ASUPidxfromitm('.../tribes/ndda/persons',"D",{},{alias:'12',name:"fred"},[], {person schema})
 *  rebuild all indexes from scratch:
 *    Odmdb.ASUPidxfromitm('.../tribes/ndda/persons',"I",{},{},[], {person schema})
 *  rebuild only publickey_alias from scratch:
 *    Odmdb.ASUPidxfromitm('.../tribes/ndda/pagans',"I",{},{},[{name:"publickey_alias",keyval:"publickey",objkey:["alias"]}], {pagans schema})
 */
Odmdb.ASUPidxfromitm = (
  objectPathname,
  crud,
  itm,
  itmold,
  idxs = [],
  schema
) => {
  if (!schema || !schema.apxid) {
    const getschema = Odmdb.Schema(objectPathname, true);
    if (getschema.status != 200) return getschema;
    schema = getschema.data.schema;
  }
  const itms = crud == "I" ? glob.sync(`${objectPathname}/itm/*.json`) : [itm];
  if (crud == "I") {
    // reinit the requested idx files
    idxs.forEach((idx) => {
      // bugfix: fs.remove is async and its promise was floating; use removeSync
      // so the file is really gone before it is rebuilt below
      fs.removeSync(`${objectPathname}/idx/${idx.name}.json`);
    });
  }
  let idxtoreindex = []; // store indexes that need a full reprocess to get complete context
  idxs = idxs.length == 0 ? schema.apxidx : idxs; // get all indexes if none requested
  itms.forEach((i) => {
    if (crud == "I") {
      itm = fs.readJSONSync(i);
    }
    idxs.forEach((idx) => {
      // unique keyval => stored as object/string, otherwise stored as array
      const keyvalisunique = schema.apxuniquekey.includes(idx.keyval);
      const idxsrc = `${objectPathname}/idx/${idx.name}.json`;
      const idxinit = idx.name.substring(0, 4) == "lst_" ? [] : {}; // select type of idx (array or object)
      let idxfile = !fs.existsSync(idxsrc) ? idxinit : fs.readJSONSync(idxsrc);
      if (idx.name.substring(0, 4) == "lst_") {
        if (["D", "U"].includes(crud)) {
          if (keyvalisunique) {
            idxfile = idxfile.filter((e) => e !== itmold[idx.keyval]);
          } else {
            idxtoreindex.push(idx); //@todo
          }
        }
        if (
          ["C", "U", "I"].includes(crud) &&
          !idxfile.includes(itm[idx.keyval])
        ) {
          idxfile.push(itm[idx.keyval]);
        }
      } else {
        if (!idx.objkey) {
          // means all properties
          idx.objkey = Object.keys(schema.properties);
        }
        if (keyvalisunique && idx.objkey.length == 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            idxfile[itm[idx.keyval]] = itm[idx.objkey[0]];
          }
        }
        if (keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            idxfile[itm[idx.keyval]] = itmfilter;
          }
        }
        if (!keyvalisunique && idx.objkey.length == 1) {
          if (
            ["D", "U"].includes(crud) &&
            // bugfix: guard existence, and `.IndexOf` is not a function — use indexOf
            idxfile[itmold[idx.keyval]] &&
            idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]) > -1
          ) {
            // U because the previous value must be removed before adding the new one
            idxfile[itmold[idx.keyval]].splice(
              idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]),
              1
            );
          }
          if (["C", "U", "I"].includes(crud)) {
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            if (!idxfile[itm[idx.keyval]].includes(itm[idx.objkey[0]])) {
              idxfile[itm[idx.keyval]].push(itm[idx.objkey[0]]);
            }
          }
        }
        if (!keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud) && idxfile[itmold[idx.keyval]]) {
            // U because the previous value must be removed before adding the new one
            let arrayofit = [];
            idxfile[itmold[idx.keyval]].forEach((it) => {
              if (it[schema.apxid] != itm[schema.apxid]) arrayofit.push(it);
            });
            idxfile[itmold[idx.keyval]] = arrayofit;
          }
          if (["C", "U", "I"].includes(crud)) {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            idxfile[itm[idx.keyval]].push(itmfilter);
          }
        }
      }
      fs.outputJSONSync(idxsrc, idxfile);
    });
  });
  if (crud != "I") {
    // update lastupdatedata to inform that something changed
    const confschema = fs.readJSONSync(`${objectPathname}/conf.json`);
    confschema.lastupdatedata = dayjs().toISOString();
    fs.outputJSONSync(`${objectPathname}/conf.json`, confschema);
  }
  return { status: 200, ref: "Odmdb", msg: "successreindex", data: {} };
};
|
||
|
/**
 * Update all itm of objectname from the remote index idx/idxname content `data`.
 * If a local itm exists that is absent (or fresher) in data, conf.json.lastupdate
 * is set to now; otherwise it is set to the provided `lastupdate`. This way, the
 * next time server A refreshes from B, its lastupdate is lower than A's.
 * @param {string} objectname object folder name under ../nationchains/
 * @param {string} idxname index filename (including extension) under /idx/
 * @param {object} data remote index content {id:{itm}}
 * @param {*} lastupdate remote lastupdate value to store when local is not fresher
 * @returns {object} {status:200, ref:"Odmdb.js", info:"Successfullupdate", data}
 */
Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
  // conflastupdate stays 0 unless local data turns out to be fresher than `data`
  let conflastupdate = 0;
  let localidx = {};
  if (fs.existsSync(`../nationchains/${objectname}/idx/${idxname}`)) {
    localidx = fs.readJsonSync(`../nationchains/${objectname}/idx/${idxname}`);
  }
  Object.keys(data).forEach((id) => {
    if (localidx[id]) {
      if (
        localidx[id].dt_update &&
        data[id].dt_update &&
        localidx[id].dt_update > data[id].dt_update
      ) {
        // means local information is fresher than the one in data for replacement
        // .toISOString ex: '2019-01-25T02:00:00.000Z'
        conflastupdate = dayjs().toISOString();
      } else {
        // replace itm with data
        localidx[id] = data[id];
        fs.outputJsonSync(
          `../nationchains/${objectname}/itm/${id}.json`,
          data[id]
        );
      }
    } else {
      // add itm
      localidx[id] = data[id];
      fs.outputJsonSync(
        `../nationchains/${objectname}/itm/${id}.json`,
        data[id]
      );
    }
  });
  // a local id missing from the fresher update means conf.lastupdate must be now
  Object.keys(localidx).forEach((id) => {
    if (!data[id]) {
      conflastupdate = dayjs().toISOString();
    }
  });
  // update the object files (index then conf)
  if (conflastupdate == 0) conflastupdate = lastupdate;
  fs.outputJSONSync(`../nationchains/${objectname}/idx/${idxname}`, localidx);
  const objconf = fs.readJsonSync(`../nationchains/${objectname}/conf.json`);
  objconf.lastupdate = conflastupdate;
  fs.outputJsonSync(`../nationchains/${objectname}/conf.json`, objconf);
  return {
    status: 200,
    ref: "Odmdb.js",
    info: "Successfullupdate",
    data: { objectname, idxname, lastupdate },
  };
};
|
||
|
module.exports = Odmdb;
|