// apxtrib/api/models/Odmdb.js

const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const dayjs = require("dayjs");
const axios = require("axios");
const conf = require(`${process.env.dirtown}/conf.json`);
const Checkjson = require(`./Checkjson.js`);
/**
 * This manages Objects for indexing, searching, checking and CRUD actions.
 * @objectPathName = objectpath/objectname
 * objectpath/objectname/conf.json
 *   /idx/all_key1.json = {key1value:{object}}
 *        lst_key1.json = [key1valA, key1valB, ...]
 *        key2_key1.json = {key2value:[key1value]}
 *        all_key1_filtername = {key1value:{object}}
 *   /itm/apxidvalue.json
 * in conf.json:
 * {
 *   "name": "object name ex:'nations'",
 *   "schema": "relative schema from dirapi or dirtown ex:'adminapi/schema/nations.json'",
 *   "lastupdateschema": 0, // timestamp of the last schema update
 *   "lastupdatedata": 0    // timestamp of the last itm update
 * }
 *
 * Specific keys in a schema for apXtrib:
 * apxid : the field whose value is used to store an item
 * apxuniquekey : list of fields that must be unique (you cannot have 2 itm with the same key value)
 * apxidx : list of index files in /idx/
 *   { "name":"lst_fieldA", "keyval": "alias" } => lst_fieldA.json = [fieldAvalue1,...]
 *   { "name":"all_fieldA", "keyval": "fieldA" } => all_fieldA.json =
 *     if fieldA is in apxuniquekey     => {fieldAvalue1:{object}}
 *     if fieldA is not in apxuniquekey => {fieldAvalue1:[{object}]}
 *   { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB"] } => word_fieldA.json
 *     if fieldA is in apxuniquekey     => {fieldAvalue1:fieldBvalue, ...}
 *     if fieldA is not in apxuniquekey => {fieldAvalue1:[fieldBv1,fieldBv2,...]}
 *   { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB","fieldC"] } => word_fieldA.json
 *     if fieldA is in apxuniquekey     => {fieldAvalue1:{fieldB:val,fieldC:val}, ...}
 *     if fieldA is not in apxuniquekey => {fieldAvalue1:[{fieldB:val,fieldC:val},...]}
 * apxaccessrights : list of profiles with CRUD access rights
 **/
const Odmdb = {};
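
/*
 * Illustrative sketch (not taken from this repo): a hypothetical "nations" object showing
 * how conf.json and the apx* schema keys described above fit together. Field names such as
 * nationId and the index list are assumptions.
 *
 * ${objectPath}/nations/conf.json
 * {
 *   "name": "nations",
 *   "schema": "adminapi/schema/nations.json",
 *   "lastupdateschema": 0,
 *   "lastupdatedata": 0
 * }
 *
 * adminapi/schema/nations.json (only the apx* part)
 * {
 *   "apxid": "nationId",
 *   "apxuniquekey": ["nationId"],
 *   "apxidx": [
 *     { "name": "lst_nationId", "keyval": "nationId" },
 *     { "name": "all_nationId", "keyval": "nationId" }
 *   ],
 *   "apxaccessrights": { "owner": { "R": [], "U": [], "D": [] }, "anonymous": { "R": [] } }
 * }
 */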
/**
 * @api syncObject
 * @param {string} url to an existing object conf (/objectname/conf.json)
 * @param {timestamp} timestamp
 *   0 => rebuild the local object from all_{apxid}.json
 *  >0 => update itm and idx, searching by datetime
 */
Odmdb.syncObject = () => {};
/**
 * @api createObject: create a space to host an object
 *
 * @source {string} "new" or a url
 * @schemapath {string} path where the schema is created or replaced (${schemaPath}/schema/)
 * @objectPath {string} path where objects are stored
 * @objectName {string} name of the object
 * @schema {object} the json schema for this object
 * @lgjson {object} the json file for a specific language
 * @lg {string} the 2-letter language code
 *
 * Set up a new object locally:
 * - from scratch => create it
 * - from a synchronization => download all_{apxid}.json from the source
 */
Odmdb.createObject = (
source,
schemaPath,
objectPath,
objectName,
schema,
lgjson,
lg
) => {
if (!fs.existsSync(schemaPath)) {
return {
status: 404,
ref: "Odmdb",
info: "pathnamedoesnotexist",
moreinfo: { fullpath: schemaPath },
};
}
if (!fs.existsSync(objectPath)) {
return {
status: 404,
ref: "Odmdb",
info: "pathnamedoesnotexist",
moreinfo: { fullpath: objectPath },
};
}
  // store the schema file if it is not empty, undefined or {}
if (
schema &&
!(Object.keys(schema).length === 0 && schema.constructor === Object)
) {
fs.outputJSONSync(`${schemaPath}/schema/${objectName}.json`, schema, {
spaces: 2,
});
}
if (
lgjson &&
lg &&
!(Object.keys(lgjson).length === 0 && lgjson.constructor === Object)
) {
fs.outputJSONSync(`${schemaPath}/lg/${objectName}_${lg}.json`, lgjson, {
spaces: 2,
});
}
  // create the object environment with the new schema config
  if (!fs.existsSync(`${objectPath}/${objectName}`)) {
    // conf.json is stored at the object root, where Odmdb.Schema expects it
    fs.outputJsonSync(
      `${objectPath}/${objectName}/conf.json`,
      { name: objectName, schema: `${schemaPath}/schema/${objectName}.json` },
      { spaces: 2 }
    );
}
return { status: 200 };
};
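
/*
 * Usage sketch for Odmdb.createObject (paths, schema and language payload below are
 * hypothetical, only the parameter order comes from the signature above):
 *
 * const res = Odmdb.createObject(
 *   "new",
 *   `${conf.dirtown}/tribes/mytribe`,           // schemaPath
 *   `${conf.dirtown}/tribes/mytribe/objects`,   // objectPath
 *   "persons",
 *   { apxid: "alias", properties: { alias: { type: "string" } } },
 *   { alias: "Alias" },                         // lgjson
 *   "en"
 * );
 * // res = {status:200} or {status:404, ref:"Odmdb", info:"pathnamedoesnotexist", ...}
 */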
/**
* Update an object
* @param {string} objectPathname
* @param {object} meta update request
* lg:
* lgobj: object to store in /lg/objectname_lg.json
* schema: an update schema
* @return {status, ref:"Odmdb", msg:"", data}
*
* Create a tmp object env and check existing object to identify any issues
* If all is fine then apply change by replacing
*/
Odmdb.updateObject = (objectPathname, meta) => {};
/**
 * Get a schema from objectPathname
 *
 * @todo currently only local schema => plan a sync every 10 minutes
 * @schemaPath local path adminapi/schema/objectName.json or /tribename/schema/objectName
 * @validschema boolean, whether the schema has to be validated (mainly used when the schema changes)
 * @return {status:200, data:{conf:"schemaconf", schema:"schemacontent"}}
 */
Odmdb.Schema = (objectPathname, validschema) => {
const confschema = fs.readJsonSync(`${objectPathname}/conf.json`);
let schemaPath = confschema.schema;
const res = {
    status: 200,
    ref: "Odmdb",
    msg: "getschema",
    data: { conf: confschema },
  };
  if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
  if (schemaPath.substring(0, 4) == "http") {
    // @todo fetch the remote schema over http with an await axios request
  } else {
    if (schemaPath.substring(0, 9) == "adminapi/") {
      schemaPath = `${conf.dirapi}/${schemaPath}`;
    } else {
      schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
    }
if (!fs.existsSync(schemaPath)) {
return {
status: 404,
ref: "Odmdb",
msg: "schemanotfound",
data: { schemaPath, schema: {} },
};
    }
res.data.schema = fs.readJsonSync(schemaPath);
if (!res.data.schema.apxid) {
return {
status: 406,
ref: "Odmdb",
msg: "missingprimarykey",
data: {},
};
}
    if (res.data.schema.apxidx) {
      // make sure apxuniquekey exists and contains apxid
      if (!res.data.schema.apxuniquekey) res.data.schema.apxuniquekey = [];
      if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
        res.data.schema.apxuniquekey.push(res.data.schema.apxid);
      }
      // use for...of so the early return actually exits Odmdb.Schema
      // (a return inside a forEach callback would be silently ignored)
      for (const idx of res.data.schema.apxidx) {
        if (
          idx.objkey &&
          !res.data.schema.apxuniquekey.includes(idx.keyval) &&
          !idx.objkey.includes(res.data.schema.apxid)
        ) {
          return {
            status: 406,
            ref: "Odmdb",
            msg: "unconsistencyapxidx",
            data: {
              name: idx.name,
              keyval: idx.keyval,
              objkey: idx.objkey,
              apxid: res.data.schema.apxid,
            },
          };
        }
      }
}
    if (validschema) {
      // returns {status:200, ref, msg} or {status!=200, multimsg:[{ref,msg,data}]}
      const check = Checkjson.schema.validation(res.data.schema);
      if (check.status != 200) {
        res.multimsg = check.multimsg;
        res.status = check.status;
      }
    }
}
return res;
};
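
/*
 * Usage sketch for Odmdb.Schema (the object path below is hypothetical):
 *
 * const getschema = Odmdb.Schema(`${conf.dirtown}/tribes/mytribe/objects/persons`, true);
 * if (getschema.status == 200) {
 *   console.log(getschema.data.conf);          // content of conf.json
 *   console.log(getschema.data.schema.apxid);  // primary key field name
 * }
 */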
Odmdb.search = (objectPath, objectName, search) => {
/*
@search= {
txt: string,
algo: match | pattern | fuzzy
fieldstring:[list of field],
indexfilter:{index1:[val1,val2 | ] }
}
Return data:[uuids]
  example: search for an exact match of "hill" in townId
  heavy search={txt:"hill",algo:"match",fieldstring:"townId"}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"townId","value":[]}}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"nationId","value":"ants"}}
  */
  const getschema = Odmdb.Schema(`${objectPath}/${objectName}`);
  if (getschema.status != 200) return getschema;
};
/**
 * Get an array of items (itm) by primary key, with a filtered list of properties
 * Objects are stored in objectPathname, whose /conf.json points to the schema
 *
 * @objectPathname where the object is stored
 * @apxidlist list of apxid values requested
 * @role {xalias,xprofils} used to compute access rights (comes from the request headers)
 * @propertiesfilter (optional) properties to return for each object (if undefined return all)
 * @Return {status:200, data:{apxid:{object filtered by @propertiesfilter}, apxid:"notfound"}}
 */
Odmdb.reads = (objectPathname, apxidlist, role, propertiesfilter) => {
  const res = { status: 200, data: {} };
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
  // Test if readable at least as owner
  if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (!accessright.R) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "accessforbidden",
      data: { crud: "R", accessright },
    };
  }
apxidlist.forEach((id) => {
if (fs.existsSync(`${objectPathname}/itm/${id}.json`)) {
const objectdata = fs.readJsonSync(`${objectPathname}/itm/${id}.json`);
if (objectdata.owner && objectdata.owner == role.xalias) {
if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
} else {
if (role.xprofils.includes("owner"))
role.xprofils = role.xprofils.filter((e) => e !== "owner");
}
      const accessright = Odmdb.accessright(
        getschema.data.schema.apxaccessrights,
        role
      );
if (!accessright.R) {
res.data[id] = "forbiden";
2023-04-13 05:46:35 +00:00
} else {
2023-11-05 11:03:25 +00:00
let newpropertiesfilter = Object.keys(objectdata);
if (accessright.R.length > 0) {
const setaccess = new Set(accessright.R);
if (!propertiesfilter) propertiesfilter = Object.keys(objectdata);
newpropertiesfilter = propertiesfilter.filter((f) =>
setaccess.has(f)
);
}
        const objinfo = {};
        newpropertiesfilter.forEach((k) => {
          if (objectdata[k]) objinfo[k] = objectdata[k];
        });
        res.data[id] = objinfo;
      }
    } else {
      res.data[id] = "notfound";
}
});
return res;
};
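
/*
 * Usage sketch for Odmdb.reads (the path, alias values and properties are hypothetical):
 *
 * const role = { xalias: "fred", xprofils: ["pagans"] };
 * const res = Odmdb.reads(
 *   `${conf.dirtown}/tribes/mytribe/objects/persons`,
 *   ["fred", "unknownalias"],
 *   role,
 *   ["alias", "name"]
 * );
 * // res.data = { fred: { alias: "fred", name: "Fred" }, unknownalias: "notfound" }
 */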
/**
 * Convert profiles into access rights
 * @param {*} apxaccessrights from the object schema {profilname:{C:[],R:[],U:[],D:[]}}
 * @param {*} role {xprofils,xalias} available after isAuthenticated
 * @returns the merged access rights: C (create) if present, R (read) restricted to a
 *          properties list or all if empty, U (update) idem, D (delete)
 * example: {"C":[],"R":[properties list],"U":[properties list],"D":[]}
 */
Odmdb.accessright = (apxaccessrights, role) => {
const accessright = {};
role.xprofils.forEach((p) => {
if (apxaccessrights[p]) {
Object.keys(apxaccessrights[p]).forEach((act) => {
if (!accessright[act]) {
accessright[act] = apxaccessrights[p][act];
} else {
accessright[act] = [
...new Set([...accessright[act], ...apxaccessrights[p][act]]),
];
}
});
}
});
return accessright;
};
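
/*
 * Sketch of how profiles are merged by Odmdb.accessright (the profile names and property
 * lists below are assumptions, not taken from a real schema):
 *
 * const apxaccessrights = {
 *   anonymous: { R: ["alias"] },
 *   pagans: { C: [], R: ["alias", "name"] },
 *   owner: { R: [], U: ["name"], D: [] },
 * };
 * const role = { xalias: "fred", xprofils: ["anonymous", "pagans"] };
 * Odmdb.accessright(apxaccessrights, role);
 * // => { R: ["alias", "name"], C: [] }  (rights of all matching profiles are unioned)
 */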
/**
 * CUD a data itm into objectPathname if Checkjson validates it, and update the idx files.
 * idx files stay up to date for unique properties but not for lists.
 * @param {string} objectPathname folder where the object is stored
 * @param {string} crud C (create), U (update) or D (delete)
 * @param {object} itm an object respecting the Checkjson schema referenced in objectPathname/conf.json
 * @param {object} role {xprofils,xalias} xprofils is the list of profiles (anonymous, pagans, person, ...);
 *                 the owner profile is deduced when the item's owner property equals xalias
 */
Odmdb.cud = (objectPathname, crud, itm, role) => {
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
if (!itm[getschema.data.schema.apxid]) {
return {
status: 406,
ref: "Odmdb",
msg: "apxidmissing",
data: { missingkey: getschema.data.schema.apxid },
};
}
  const pathindex = `${objectPathname}/idx/lst_${getschema.data.schema.apxid}.json`;
  if (!fs.existsSync(pathindex)) {
    fs.outputJSONSync(pathindex, []);
    fs.ensureDirSync(`${objectPathname}/itm/`);
  }
  const existid = fs
    .readJSONSync(pathindex)
.includes(itm[getschema.data.schema.apxid]);
if (existid && crud == "C") {
return {
status: 406,
ref: "Odmdb",
msg: "alreadyexist",
data: {
objectname: path.basename(objectPathname),
key: getschema.data.schema.apxid,
val: itm[getschema.data.schema.apxid],
},
};
}
if (!existid && ["U", "D"].includes(crud)) {
return {
status: 406,
ref: "Odmdb",
msg: "doesnotexist",
data: {
objectname: path.basename(objectPathname),
key: getschema.data.schema.apxid,
val: itm[getschema.data.schema.apxid],
},
};
}
const itmold = existid
? fs.readJSONSync(
`${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
)
: {};
  if (existid && itmold.owner && itmold.owner == role.xalias) {
    if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
  } else if (!existid) {
    // set the owner because this is a Create
    itm.owner = role.xalias;
  }
  // get accessright {C:[],R:[],U:[],D:[]}; the presence of a key means the action is authorized,
  // and for R and U a non-empty array restricts the right to the listed properties
const accessright = Odmdb.accessright(
getschema.data.schema.apxaccessrights,
role
);
console.log("accessright", accessright);
if (
(crud == "C" && !accessright.C) ||
(crud == "D" && !accessright.D) ||
(crud == "U" && !accessright.U)
) {
return {
status: 403,
ref: "Odmdb",
msg: "accessforbidden",
data: { crud, accessright },
};
}
  // delete or save
  let itmtostore = itm;
  if (crud == "D") {
itmold["dt_delete"] = dayjs();
fs.outputJSONSync(
`${objectPathname}/delitm/${itmold[getschema.data.schema.apxid]}.json`,
itmold
);
fs.rmSync(
`${objectPathname}/itm/${itmold[getschema.data.schema.apxid]}.json`
);
} else {
    // if Create or Update, erase the old version
if (crud == "U" && accessright.U.length > 0) {
itmtostore = itmold;
accessright.U.forEach((p) => {
itmtostore[p] = itm[p];
});
itmtostore.dt_update = dayjs();
}
if (crud == "C") itmtostore.dt_create = dayjs();
// check consistency of datatostore
    const chkdata = Checkjson.schema.data(
      getschema.data.schema,
      itmtostore,
      false
    );
    if (chkdata.status != 200) return chkdata;
    if (!getschema.data.schema.apxuniquekey)
      getschema.data.schema.apxuniquekey = [];
    // keep the checked item so it can be returned after indexing
    itmtostore = chkdata.data.itm;
    fs.outputJSONSync(
      `${objectPathname}/itm/${chkdata.data.apxid}.json`,
      itmtostore
    );
}
console.log("getschema", getschema);
//update idx
Odmdb.idxfromitm(
objectPathname,
crud,
itm,
itmold,
[],
getschema.data.schema
);
getschema.data.conf.lastupdatedata = dayjs();
fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
  return {
    status: 200,
    ref: "Odmdb",
    msg: "cudsuccessfull",
    data: { itm: crud == "D" ? itmold : itmtostore },
  };
};
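
/*
 * Usage sketch for Odmdb.cud (the object path, alias and item are hypothetical):
 *
 * const role = { xalias: "fred", xprofils: ["pagans"] };
 * const res = Odmdb.cud(
 *   `${conf.dirtown}/tribes/mytribe/objects/persons`,
 *   "C",
 *   { alias: "fred", name: "Fred" },
 *   role
 * );
 * // res = {status:200, ref:"Odmdb", msg:"cudsuccessfull", data:{itm:{...}}}
 * // or {status:406, msg:"alreadyexist"|"doesnotexist"|...} / {status:403, msg:"accessforbidden"}
 */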
/**
 * Create/update idx files from itm(s)
 *
 * @param {string} objectPathname
 * @param {letter} crud C add, U update, D delete, I reindex
 * @param {object} itm item to Create or Update, or {} if crud == I or crud == D
 * @param {object} itmold current item (previous version) if crud == U or D, or {} if crud == I or C
 * @param {array} idxs specific list of idx to rebuild; if [] then the schema's apxidx is used
 * @param {object} schema if empty, the schema is loaded from Odmdb.Schema().data.schema
 *
 * example create alias 12 name fred:
 * Odmdb.idxfromitm('.../tribes/ndda/persons',"C",{alias:'12',name:"fred"},{},[], {person schema})
 * example update alias 12 name to freddy:
 * Odmdb.idxfromitm('.../tribes/ndda/persons',"U",{alias:'12',name:"freddy"},{alias:'12',name:"fred"},[], {person schema})
 * example delete alias 12:
 * Odmdb.idxfromitm('.../tribes/ndda/persons',"D",{},{alias:'12',name:"fred"},[], {person schema})
 * example rebuild all indexes from scratch:
 * Odmdb.idxfromitm('.../tribes/ndda/persons',"I",{},{},[], {person schema})
 * example rebuild only the publickey_alias index from scratch:
 * Odmdb.idxfromitm('.../tribes/ndda/pagans',"I",{},{},[{ name:"publickey_alias",keyval:"publickey",objkey:["alias"]}], {pagans schema})
 */
Odmdb.idxfromitm = (objectPathname, crud, itm, itmold, idxs = [], schema) => {
console.log(`idxfromitem for ${objectPathname} action:${crud}`);
if (!schema || !schema.apxid) {
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
schema = getschema.data.schema;
}
  const itms = crud == "I" ? glob.sync(`${objectPathname}/itm/*.json`) : [itm];
if (crud == "I") {
//reinit all idx
idxs.forEach((idx) => {
fs.remove(`${objectPathname}/idx/${idx.name}.json`);
});
}
let idxtoreindex = []; //store index that has to be reprocessto get full context
idxs = idxs.length == 0 ? schema.apxidx : idxs; // get all index if none
itms.forEach((i) => {
if (crud == "I") {
itm = fs.readJSONSync(i);
}
idxs.forEach((idx) => {
const keyvalisunique = schema.apxuniquekey.includes(idx.keyval); // check if keyval is unique mean store as an object (or string) else store as an array
const idxsrc = `${objectPathname}/idx/${idx.name}.json`;
const idxinit = idx.name.substring(0, 4) == "lst_" ? [] : {}; // select type of idx (array or object)
let idxfile = !fs.existsSync(idxsrc) ? idxinit : fs.readJSONSync(idxsrc);
if (idx.name.substring(0, 4) == "lst_") {
if (["D", "U"].includes(crud)) {
if (keyvalisunique) {
idxfile = idxfile.filter((e) => e !== itmold[idx.keyval]);
} else {
idxtoreindex.push(idx); //@todo
}
}
if (
["C", "U", "I"].includes(crud) &&
!idxfile.includes(itm[idx.keyval])
) {
idxfile.push(itm[idx.keyval]);
}
} else {
if (!idx.objkey) {
//mean all properties
idx.objkey = Object.keys(schema.properties);
}
if (keyvalisunique && idx.objkey.length == 1) {
if (["D", "U"].includes(crud)) {
delete idxfile[itmold[idx.keyval]];
} else {
idxfile[itm[idx.keyval]] = itm[idx.objkey[0]];
}
}
if (keyvalisunique && idx.objkey.length > 1) {
if (["D", "U"].includes(crud)) {
delete idxfile[itmold[idx.keyval]];
} else {
const itmfilter = {};
idx.objkey.forEach((i) => {
if (itm[i]) itmfilter[i] = itm[i];
});
idxfile[itm[idx.keyval]] = itmfilter;
}
}
        if (!keyvalisunique && idx.objkey.length == 1) {
          if (
            ["D", "U"].includes(crud) &&
            idxfile[itmold[idx.keyval]] &&
            idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]) > -1
          ) {
            // U needs to remove the previous value before adding the new one
            idxfile[itmold[idx.keyval]].splice(
              idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]),
              1
            );
          }
if (["C", "U", "I"].includes(crud)) {
if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
if (!idxfile[itm[idx.keyval]].includes(itm[idx.objkey[0]])) {
idxfile[itm[idx.keyval]].push(itm[idx.objkey[0]]);
}
}
}
if (!keyvalisunique && idx.objkey.length > 1) {
if (["D", "U"].includes(crud) && idxfile[itmold[idx.keyval]]) {
// U because need to remove previous value before adding it
let arrayofit = [];
idxfile[itmold[idx.keyval]].forEach((it) => {
if (it[schema.apxid] != itm[schema.apxid]) arrayofit.push(it);
});
idxfile[itmold[idx.keyval]] = arrayofit;
}
if (["C", "U", "I"].includes(crud)) {
const itmfilter = {};
idx.objkey.forEach((i) => {
if (itm[i]) itmfilter[i] = itm[i];
});
if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
idxfile[itm[idx.keyval]].push(itmfilter);
}
}
}
fs.outputJSONSync(idxsrc, idxfile);
});
});
if (crud != "I") {
//update lastupdatedata to inform something change
const confschema = fs.readJSONSync(`${objectPathname}/conf.json`);
confschema.lastupdatedata = dayjs();
fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
}
return { status: 200, ref: "Odmdb", msg: "successreindex", data: {} };
};
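
/*
 * Sketch of the resulting idx files after the "C" example in the JSDoc above, assuming the
 * person schema declares apxid:"alias" and the indexes { name:"lst_alias", keyval:"alias" }
 * and { name:"name_alias", keyval:"name", objkey:["alias"] } (both are assumptions):
 *
 * idx/lst_alias.json  => ["12"]
 * idx/name_alias.json => {"fred":["12"]}   // name is not in apxuniquekey, so values are arrays
 */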
Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
  /**
   * Update all itm of objectname from index idx/idxname with data
   * if an itm exists locally but not in data then /objectname/conf.json lastupdate = now
   * otherwise /objectname/conf.json lastupdate = lastupdate
   * this way, next time server A wants to refresh from B, its lastupdate is lower than on A
   */
let conflastupdate = 0;
let localidx = {};
if (
fs.existsSync(`${conf.dirapi}/nationchains/${objectname}/idx/${idxname}`)
) {
localidx = fs.readJsonSync(
`${conf.dirapi}/nationchains/${objectname}/idx/${idxname}`
);
}
Object.keys(data).forEach((id) => {
if (localidx[id]) {
if (
localidx[id].dt_update &&
data[id].dt_update &&
localidx[id].dt_update > data[id].dt_update
) {
// means local information is fresher than the one in data for replacement
        // .toISOString() ex: '2019-01-25T02:00:00.000Z'
        conflastupdate = dayjs().toISOString();
} else {
// replace itm with data
localidx[id] = data[id];
fs.outputJsonSync(
`${conf.dirapi}/nationchains/${objectname}/itm/${id}.json`,
data[id]
);
}
} else {
// add itm
localidx[id] = data[id];
fs.outputJsonSync(
`${conf.dirapi}/nationchains/${objectname}/itm/${id}.json`,
data[id]
);
}
});
  // check if any id is missing from the fresher update; if so conf.lastupdate is set to now to indicate it
Object.keys(localidx).forEach((id) => {
if (!data[id]) {
      conflastupdate = dayjs().toISOString();
}
});
// update the object files
if (conflastupdate == 0) conflastupdate = lastupdate;
fs.outputJSONSync(
`${conf.dirapi}/nationchains/${objectname}/idx/${idxname}`,
localidx
);
const objconf = fs.readJsonSync(
`${conf.dirapi}/nationchains/${objectname}/conf.json`
);
objconf.lastupdate = conflastupdate;
fs.outputJsonSync(
`${conf.dirapi}/nationchains/${objectname}/conf.json`,
objconf
);
return {
status: 200,
ref: "Odmdb.js",
info: "Successfullupdate",
data: { objectname, idxname, lastupdate },
};
};
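
/*
 * Usage sketch for Odmdb.updatefromidxall, e.g. after fetching a remote index with axios
 * (the url, index name and timestamp below are hypothetical):
 *
 * axios.get("https://anothertown/nationchains/pagans/idx/all_alias.json").then((rep) => {
 *   const res = Odmdb.updatefromidxall("pagans", "all_alias.json", rep.data, dayjs().toISOString());
 *   console.log(res.status, res.info);
 * });
 */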
module.exports = Odmdb;