const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const dayjs = require("dayjs");
const axios = require("axios");
const conf = require(`../../../conf.json`);
const Checkjson = require(`./Checkjson.js`);
const { promiseHooks } = require("v8");
const currentmod = "Odmdb";
const log = conf.api.activelog.includes(currentmod);

/**
 * This module manages objects for indexing, searching, checking and CRUD actions.
 * @objectPathName = objectpath/objectname
 * objectpath/objectname/conf.json
 * /idx/all_key1.json = {key1value:{object}}
 * lst_key1.json = [key1valA,key1val2,...]
 * key2_key1.json = {key2value:[key1value]}
 * all_key1_filtername = {key1value:{object}}
 * /itm/apxidvalue.json
 * in conf.json:
 * {
 *   "name": "object name ex:'nations'",
 *   "schema": "schema path relative to dirapi/dirtown ex:'adminapi/schema/nations.json'",
 *   "lastupdateschema": 0, // timestamp of the last schema update
 *   "lastupdatedata": 0 // timestamp of the last itm update
 * }
 *
 * Specific keys in a schema for apxtri:
 * apxid : the field whose value is used to store an item
 * apxuniquekey : list of fields that must be unique; you cannot have 2 itm with the same key value
 * apxidx : list of index files in /idx/
 *   { "name":"lst_fieldA", "keyval": "alias" }, => lst_fieldA.json = [fieldAvalue1,...]
 *   { "name":"all_fieldA", "keyval": "fieldA" }, => all_fieldA.json =
 *     if fieldA in apxuniquekey = {fieldAvalue1:{object}}
 *     if fieldA not in apxuniquekey = {fieldAvalue1:[{object}]}
 *   { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB"]}, => word_fieldA.json
 *     if fieldA in apxuniquekey = {fieldAvalue1:fieldBvalue,}
 *     if fieldA not in apxuniquekey = {fieldAvalue1: [fieldBv1,fieldBv2,]}
 *   { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB","fieldC"]}, => word_fieldA.json
 *     if fieldA in apxuniquekey = {fieldAvalue1:{fieldB:val,fieldC:val},}
 *     if fieldA not in apxuniquekey = {fieldAvalue1: [{fieldB:val,fieldC:val},]}
 * apxaccessrights : list of profiles with CRUD access rights
 **/

const Odmdb = {};
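
/*
 * Illustrative sketch (hypothetical values, not a real tribe object): a minimal
 * schema using the apx* conventions described above. Field names such as
 * "alias" and "nationId" are assumptions for the example only.
 *
 * {
 *   "title": "persons",
 *   "type": "object",
 *   "properties": {
 *     "alias": { "type": "string" },
 *     "nationId": { "type": "string" }
 *   },
 *   "apxid": "alias",
 *   "apxuniquekey": ["alias"],
 *   "apxidx": [
 *     { "name": "lst_alias", "keyval": "alias" },
 *     { "name": "nationId_alias", "keyval": "nationId", "objkey": ["alias"] }
 *   ],
 *   "apxaccessrights": { "owner": { "R": [], "U": ["nationId"] } }
 * }
 */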

/**
 * const Checkjson = require(`../../../adminapi/apxtri/models/Checkjson`);
 * @api syncObject
 * @param {string} url to an existing object conf (/objectname/conf.json)
 * @param {timestamp} timestamp
 *   0 => rebuild the local object from all_{idapx}.json
 *   >0 => update itm and idx, searched by datetime
 */
Odmdb.syncObject = () => {};

/**
 * @api createObject: create a space to host an object
 *
 * @source {string} "new", url,
 * @schemapath {string} path where the schema is created or replaced ${schemaPath}/schema/
 * @objectPath {string} path where objects are stored
 * @objectName {string} name of the object
 * @schema {object} the json schema for this object
 * @lgjson {object} the json file for a specific language
 * @lg {string} the 2-letter language
 *
 * Set up a new object locally =>
 * source
 * - from scratch => Create
 * - from a synchronization => Download from source all_{apxid}.json
 */
Odmdb.createObject = (
  source,
  schemaPath,
  objectPath,
  objectName,
  schema,
  lgjson,
  lg
) => {
  if (!fs.existsSync(schemaPath)) {
    return {
      status: 404,
      ref: "Odmdb",
      info: "pathnamedoesnotexist",
      moreinfo: { fullpath: schemaPath },
    };
  }
  if (!fs.existsSync(objectPath)) {
    return {
      status: 404,
      ref: "Odmdb",
      info: "pathnamedoesnotexist",
      moreinfo: { fullpath: objectPath },
    };
  }
  // store the schema file if not empty, undefined or {}
  if (
    schema &&
    !(Object.keys(schema).length === 0 && schema.constructor === Object)
  ) {
    fs.outputJSONSync(`${schemaPath}/schema/${objectName}.json`, schema, {
      spaces: 2,
    });
  }
  if (
    lgjson &&
    lg &&
    !(Object.keys(lgjson).length === 0 && lgjson.constructor === Object)
  ) {
    fs.outputJSONSync(`${schemaPath}/lg/${objectName}_${lg}.json`, lgjson, {
      spaces: 2,
    });
  }
  // create the object environment with the new schema config
  // conf.json lives at the object root, where the rest of this module reads it
  if (!fs.existsSync(`${objectPath}/${objectName}`)) {
    fs.outputJsonSync(
      `${objectPath}/${objectName}/conf.json`,
      { schema: `${schemaPath}/schema/${objectName}.json` },
      { spaces: 2 }
    );
  }
  return { status: 200 };
};
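
/*
 * Usage sketch (hypothetical paths and schema): create a local "nations" object
 * hosting its schema under ../../adminapi and its items under ../../adminapi/objects.
 *
 * Odmdb.createObject("new", "../../adminapi", "../../adminapi/objects",
 *   "nations", { title: "nations", type: "object", properties: {} }, {}, "en");
 */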

/**
 * Update an object
 * @param {string} objectPathname
 * @param {object} meta update request
 *   lg:
 *   lgobj: object to store in /lg/objectname_lg.json
 *   schema: an updated schema
 * @return {status, ref:"Odmdb", msg:"", data}
 *
 * Create a tmp object env and check the existing object to identify any issues.
 * If all is fine then apply the change by replacing it.
 */
Odmdb.updateObject = (objectPathname, meta) => {};

/**
 * Get a schema from objectPathname
 *
 * @todo only local schema => plan a sync every 10 minutes
 * @schemaPath local path adminapi/schema/objectName.json or /tribename/schema/objectName
 * @validschema boolean, whether the schema must be checked; mainly used when changing a schema
 * @lg language in which to get the schema
 * @return {status:200,data:{conf:"schemaconf",schema:"schemacontent"} }
 */
Odmdb.Schema = (objectPathname, validschema, lg = "en") => {
  const replacelg = (data) => {
    // data.en holds the base schema, data.lg the translated version
    Object.keys(data.lg).forEach((k) => {
      if (data.lg[k].title) data.en[k].title = data.lg[k].title;
      if (data.lg[k].description)
        data.en[k].description = data.lg[k].description;
    });
    if (data.lg.properties) {
      // recurse once into properties to translate each field
      const res = replacelg({
        en: data.en.properties,
        lg: data.lg.properties,
      });
      data.lg.properties = res.lg;
      data.en.properties = res.en;
    }
    return data;
  };
  const getschemalg = (schemaPath, lg) => {
    if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
    if (schemaPath.substring(0, 4) == "http") {
      // @todo fetch the schema over http with an awaited axios request
    } else {
      schemaPath = `../../${schemaPath}`;
      if (log)
        console.log(
          currentmod,
          "resolve path schemaPath:",
          path.resolve(schemaPath)
        );
      if (!fs.existsSync(schemaPath)) {
        return {};
      } else {
        let schemalg = fs.readJsonSync(schemaPath);
        if (lg != "en") {
          let lgtrans = {};
          try {
            lgtrans = fs.readJsonSync(
              schemaPath
                .replace("/schema/", "/schema/lg/")
                .replace(".json", `_${lg}.json`)
            );
            const res = replacelg({ en: schemalg, lg: lgtrans });
            //console.log(res.en.title,res.lg.title)
            schemalg = res.en;
          } catch (err) {
            // no translation file, deliver en by default
          }
        }
        return schemalg;
      }
    }
  };
  const convoptionstoenum = (propertie, lg) => {
    if (!propertie.options) return propertie;
    if (!propertie.options["$ref"]) {
      propertie.msg = "missingref";
      return propertie;
    }
    let optionsfile;
    let optionstype;
    if (propertie.options["$ref"].includes("/options/")) {
      propertie.comment = `options:${propertie.options["$ref"]}`;
      optionstype = "options";
      optionsfile = path.resolve(
        `../../${propertie.options["$ref"]}_${lg}.json`
      );
    }
    if (propertie.options["$ref"].includes("/idx/")) {
      propertie.comment = `itms:${propertie.options["$ref"]}`;
      optionstype = "idx";
      optionsfile = path.resolve(`../../${propertie.options["$ref"]}.json`);
    }
    if (!optionsfile) {
      // $ref matched neither /options/ nor /idx/
      propertie.msg = "missingref";
      return propertie;
    }
    if (log) console.log(currentmod, "Options link:", optionsfile);
    if (!fs.existsSync(optionsfile)) {
      propertie.msg = "missingref";
      return propertie;
    } else {
      delete propertie.options;
      if (optionstype == "options") {
        propertie.enum = fs.readJSONSync(optionsfile).lst_idx;
      }
      if (optionstype == "idx") {
        propertie.enum = fs.readJSONSync(optionsfile);
      }
    }
    return propertie;
  };
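  /*
   * Illustrative sketch (hypothetical paths and values): how convoptionstoenum
   * rewrites a property. A schema property declared as
   *   { "type": "string", "options": { "$ref": "adminapi/objects/options/nations" } }
   * becomes, assuming ../../adminapi/objects/options/nations_en.json contains
   * { "lst_idx": ["ants", "bees"] }:
   *   { "type": "string", "comment": "options:adminapi/objects/options/nations", "enum": ["ants", "bees"] }
   */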

  if (log) console.log(currentmod, `${objectPathname}/conf.json`);
  const res = {
    status: 200,
    ref: "Odmdb",
    msg: "getschema",
    data: {},
  };
  if (fs.existsSync(`${objectPathname}/conf.json`)) {
    res.data.conf = fs.readJsonSync(`${objectPathname}/conf.json`);
    res.data.schema = getschemalg(res.data.conf.schema, lg);
  } else {
    res.data.conf = {};
  }
  if (!res.data.schema || Object.keys(res.data.schema).length == 0) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "schemanotfound",
      data: { objectPathname: path.resolve(objectPathname), schema: {} },
    };
  }
  // resolve $ref from $defs
  if (res.data.schema["$defs"]) {
    Object.keys(res.data.schema["$defs"]).forEach((ss) => {
      Object.keys(res.data.schema["$defs"][ss].properties).forEach((pp) => {
        res.data.schema["$defs"][ss].properties[pp] = convoptionstoenum(
          res.data.schema["$defs"][ss].properties[pp],
          lg
        );
      });
    });
  }
  // for...of (not forEach) so that a return here exits Schema
  for (const p of Object.keys(res.data.schema.properties)) {
    if (
      res.data.schema.properties[p].type == "object" &&
      res.data.schema.properties[p]["$ref"]
    ) {
      let subschema;
      const localdef = res.data.schema.properties[p]["$ref"].includes("#/");
      if (
        localdef &&
        !(
          res.data.schema["$defs"] &&
          res.data.schema["$defs"][res.data.schema.properties[p]["$ref"]]
        )
      ) {
        res.status = 404;
        res.msg = "missinglocalref";
        res.data.missingref = res.data.schema.properties[p];
        return res;
      }
      if (localdef) {
        res.data.schema.properties[p] =
          res.data.schema["$defs"][res.data.schema.properties[p]["$ref"]];
      } else {
        subschema = Odmdb.Schema(
          path.resolve(res.data.schema.properties[p]["$ref"]),
          validschema,
          lg
        );
        if (subschema.status == 200) {
          res.data.schema.properties[p] = subschema.data.schema;
        } else {
          subschema.data.originschemaproperty = p;
          return subschema;
        }
      }
    }
    if (res.data.schema.properties[p].options) {
      // replace options by enum:[]
      res.data.schema.properties[p] = convoptionstoenum(
        res.data.schema.properties[p],
        lg
      );
    }
  }

  if (!res.data.schema.apxid) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "missingprimarykey",
      data: {},
    };
  }
  if (res.data.schema.apxidx) {
    // apxidx requires apxuniquekey; the apxid is added to it when missing
    if (!res.data.schema.apxuniquekey) {
      return {
        status: 406,
        ref: "Odmdb",
        msg: "missingapxuniquekey",
        data: {},
      };
    }
    if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
      res.data.schema.apxuniquekey.push(res.data.schema.apxid);
    }
    //`../../${req.session.header.xtribe}/objects/persons`
    for (const idx of res.data.schema.apxidx) {
      if (
        idx.objkey &&
        !res.data.schema.apxuniquekey.includes(idx.keyval) &&
        !idx.objkey.includes(res.data.schema.apxid)
      ) {
        return {
          status: 406,
          ref: "Odmdb",
          msg: "unconsistencyapxidx",
          data: {
            name: idx.name,
            keyval: idx.keyval,
            objkey: idx.objkey,
            apxid: res.data.schema.apxid,
          },
        };
      }
    }
  }
  if (validschema) {
    // return {status:200, ref, msg} or {status!:200,multimsg:[{ref,msg,data}]}
    const check = Checkjson.schema.validation(res.data.schema);
    if (check.status != 200) {
      res.multimsg = check.multimsg;
      res.status = check.status;
    }
  }
  return res;
};
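
/*
 * Usage sketch (hypothetical object path): fetch the persons schema with its
 * French translation overlay and fail fast if it is invalid.
 *
 * const getschema = Odmdb.Schema("../../smatchit/objects/persons", true, "fr");
 * if (getschema.status != 200) return getschema;
 * const apxid = getschema.data.schema.apxid; // primary key field name
 */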

Odmdb.search = (objectPathname, objsearch, role) => {
  /*
  if fields exist:
    return data:{id:{field:value}}
  else:
    return data:[id]

  example:
  objsearch = {
    apxid:['toto','titi'], fields:['firstname']
  }
  returns data:{toto:{firstname:"Paul"},titi:{firstname:"Jacques"}}
  ADD HERE OTHER OPTIONS USING IDX FOR PERFORMANCE

  example: search for an exact match of hill in townId
  heavy search={txt:"hill",algo:"match",fieldstring:"townId"}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"townId","value":[]}}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"nationId","value":"ants"}}
  */
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;
  //console.log(getschema.data);
  const apxid = getschema.data.schema.apxid;
  let subsearch = objsearch.apxid
    ? objsearch.apxid
    : fs.readJSONSync(`${objectPathname}/idx/lst_${apxid}.json`);
  // ADD HERE OTHER FILTRATION SEARCH depending on the question
  if (objsearch.condition) {
    // remove or add in subsearch
  }
  if (objsearch.fields) {
    const resultat = {};
    //console.log(getschema.data.schema.apxaccessrights);
    //console.log(role);
    if (objsearch.fields == "all") {
      //console.log(getschema.data)
      objsearch.fields = Object.keys(getschema.data.schema.properties);
    }
    subsearch.forEach((i) => {
      const ifields = {};
      if (fs.existsSync(`${objectPathname}/itm/${i}.json`)) {
        const itm = fs.readJSONSync(`${objectPathname}/itm/${i}.json`);
        if (itm.owner && itm.owner == role.xalias) {
          role.xprofils.push("owner");
        } else {
          role.xprofils = role.xprofils.filter((e) => e !== "owner");
        }
        // accessright is computed per item because the owner profile depends on the itm
        const accessright = Odmdb.accessright(
          getschema.data.schema.apxaccessrights,
          role
        );
        //console.log(accessright);
        objsearch.fields.forEach((f) => {
          if (
            accessright.R &&
            (accessright.R.length == 0 || accessright.R.includes(f))
          ) {
            ifields[f] = itm[f];
          } else {
            //ifields[f] = "unauthorized";
          }
        });
      } else {
        ifields.notfound = true;
      }
      resultat[i] = ifields;
    });
    return {
      status: 200,
      ref: "Odmdb",
      msg: "resultsearchlist",
      data: resultat,
    };
  } else {
    return {
      status: 200,
      ref: "Odmdb",
      msg: "resultsearchlist",
      data: [subsearch],
    };
  }
};
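
/*
 * Usage sketch (hypothetical values, assuming the role's profiles grant R):
 * fetch firstname for two known apxid values; role comes from the
 * authenticated header.
 *
 * const res = Odmdb.search("../../smatchit/objects/persons",
 *   { apxid: ["toto", "titi"], fields: ["firstname"] },
 *   { xalias: "toto", xprofils: ["pagans"] });
 * // res.data => { toto: { firstname: "Paul" }, titi: { firstname: "Jacques" } }
 */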

Odmdb.r = (objectPathname, apxid, role) => {
  const itmf = `${objectPathname}/itm/${apxid}.json`;
  if (!fs.existsSync(itmf)) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "persondoesnotexist",
      data: { person: apxid },
    };
  }
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;
  const itm = fs.readJsonSync(itmf);
  if (itm.owner && itm.owner == role.xalias) {
    role.xprofils.push("owner");
  }
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (!accessright.R) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "profilnotallow",
      data: { person: apxid },
    };
  }
  // an empty R list means every property is readable
  const readablefields =
    accessright.R.length == 0 ? Object.keys(itm) : accessright.R;
  const data = {};
  readablefields.forEach((p) => {
    data[p] = itm[p];
  });
  return { status: 200, ref: "Odmdb", msg: "profilallow", data };
};

/**
 * To get an array of items (itm) per primary key with a list of fields
 * Objects are stored in objectPath/objectName/conf.json
 *
 * @objectPathname where objects are stored (where /object/conf.json indicates where the schema is)
 * @uuidprimarykeyList list of requested uuid
 * @role {xalias,xprofils} used to compute accessright, comes from the header
 * @propertiesfilter (optional) keys to return for each object (if undefined then return all)
 * @Return {status:200; data:{uuid:{data filtered by @propertiesfilter},uuid:"notfound"}}
 *
 * ASUP (deprecated): must be handled at the index view level
 */
Odmdb.ASUPreads = (objectPathname, apxidlist, role, propertiesfilter) => {
  const res = { status: 200, data: {} };
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;
  // Test if readable, at least as owner
  role.xprofils.push("owner");
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (!accessright.R) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "profilnotallow",
      data: { crud: "R", accessright },
    };
  }
  apxidlist.forEach((id) => {
    if (fs.existsSync(`${objectPathname}/itm/${id}.json`)) {
      const objectdata = fs.readJsonSync(`${objectPathname}/itm/${id}.json`);
      if (objectdata.owner && objectdata.owner == role.xalias) {
        if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
      } else {
        if (role.xprofils.includes("owner"))
          role.xprofils = role.xprofils.filter((e) => e !== "owner");
      }
      const accessright = Odmdb.accessright(
        getschema.data.schema.apxaccessrights,
        role
      );
      if (!accessright.R) {
        res.data[id] = "forbiden";
      } else {
        let newpropertiesfilter = Object.keys(objectdata);
        if (accessright.R.length > 0) {
          const setaccess = new Set(accessright.R);
          if (!propertiesfilter) propertiesfilter = Object.keys(objectdata);
          newpropertiesfilter = propertiesfilter.filter((f) =>
            setaccess.has(f)
          );
        }
        const objinfo = {};
        newpropertiesfilter.forEach((k) => {
          if (objectdata[k]) objinfo[k] = objectdata[k];
        });
        res.data[id] = objinfo;
      }
    } else {
      res.data[id] = "notfound";
    }
  });
  return res;
};

/**
 * Convert profiles into accessright
 * @param {*} apxaccessrights from the object schema {profilname:{C:[],R:[],U:[],D:[]}}
 * @param {*} role {xprofils,xalias} available after isAuthenticated
 * @returns access rights: C create if present, R read (a properties list, or all if empty), U update (a properties list, or all if empty), D delete
 * example: {"C":[],"R":[properties list],"U":[properties list],"D":[]}
 */
Odmdb.accessright = (apxaccessrights, role) => {
  //if (log) console.log(currentmod,"apxaccessrights",apxaccessrights)
  //if (log) console.log(currentmod,"role",role)
  const accessright = {};
  role.xprofils.forEach((p) => {
    if (apxaccessrights[p]) {
      Object.keys(apxaccessrights[p]).forEach((act) => {
        if (!accessright[act]) {
          accessright[act] = apxaccessrights[p][act];
        } else {
          if (accessright[act].length != 0) {
            // an [] already means full access right on any property, keep it
            accessright[act] = [
              ...new Set([...accessright[act], ...apxaccessrights[p][act]]),
            ];
          }
        }
        //if (log) console.log(currentmod,act,accessright[act])
      });
    }
  });
  return accessright;
};
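
/*
 * Illustrative sketch (hypothetical profiles): how rights merge across the
 * role's profiles. Each act list is unioned profile by profile.
 *
 * const apxaccessrights = {
 *   pagans: { R: ["firstname"] },
 *   person: { R: ["firstname", "lastname"], U: ["lastname"] },
 * };
 * Odmdb.accessright(apxaccessrights, { xprofils: ["pagans", "person"], xalias: "toto" });
 * // => { R: ["firstname", "lastname"], U: ["lastname"] }
 */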

/**
 * CUD a data itm into objectPathname if checkJson is valid
 * and update idx
 * idx is up to date for unique properties but not for lists
 * @param {string} objectPathname folder name where objects are stored
 * @param {object} itm an object respecting the checkJson schema in objectPathname/conf.json
 * @param {string} crud C reate U pdate D elete
 * @param {array} role {xprofils,xalias} xprofils is a list of profiles like anonymous, pagans, person; owner is deduced when the object property owner matches the alias
 */
Odmdb.cud = (objectPathname, crud, itm, role, runindex = true) => {
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;

  if (!itm[getschema.data.schema.apxid]) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "apxidmissing",
      data: { missingkey: getschema.data.schema.apxid },
    };
  }
  fs.ensureDirSync(`${objectPathname}/itm/`);
  fs.ensureDirSync(`${objectPathname}/idx/`);
  const existid = fs.existsSync(
    `${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
  );
  if (log) console.log(currentmod, "Pass schema itm existid = ", existid);
  /*const pathindex = `${objectPathname}/idx/lst_${getschema.data.schema.apxid}.json`;
  if (!fs.existsSync(pathindex)) {
    fs.outputJSONSync(pathindex, []);
    fs.ensureDirSync(`${objectPathname}/itm/`);
  }
  const existid = fs
    .readJSONSync(pathindex)
    .includes(itm[getschema.data.schema.apxid]);
  */
  if (existid && crud == "C") {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "alreadyexist",
      data: {
        objectname: path.basename(objectPathname),
        key: getschema.data.schema.apxid,
        val: itm[getschema.data.schema.apxid],
      },
    };
  }
  if (!existid && ["U", "D"].includes(crud)) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "doesnotexist",
      data: {
        objectname: path.basename(objectPathname),
        key: getschema.data.schema.apxid,
        val: itm[getschema.data.schema.apxid],
      },
    };
  }
  const feedbackinfo = {};
  const itmold = existid
    ? fs.readJSONSync(
        `${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
      )
    : {};
  if (existid && itmold.owner && itmold.owner == role.xalias) {
    role.xprofils.push("owner");
  }
  if (!existid && crud == "C" && !itm.owner) {
    // set owner because this is a Create
    itm.owner = role.xalias;
    role.xprofils.push("owner");
  }
  // get accessright {C:[],R:[],U:[],D:[]}; an existing key means authorized, and for R and U a non-empty array restricts the right to those properties
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );

  if (log) console.log(currentmod, "Accessright to: ", accessright);
  if (getschema.data.schema.apxaccessrights.contextrules) {
    // Need to check the context to validate accessright
    //require('../../')
  }
  if (
    (crud == "C" && !accessright.C) ||
    (crud == "D" && !accessright.D) ||
    (crud == "U" && !accessright.U)
  ) {
    if (log) console.log(currentmod, "Forbidden accessright:", accessright);
    return {
      status: 403,
      ref: "Odmdb",
      msg: "profilnotallow",
      data: { crud, accessright },
    };
  }
  // delete or save
  if (crud == "D") {
    itmold["dt_delete"] = dayjs().toISOString();
    fs.outputJSONSync(
      `${objectPathname}/delitm/${itmold[getschema.data.schema.apxid]}.json`,
      itmold
    );
    fs.rmSync(
      `${objectPathname}/itm/${itmold[getschema.data.schema.apxid]}.json`
    );
  } else {
    // if Create or Update, erase the old version
    let itmtostore = itm;
    if (crud == "U" && accessright.U.length > 0) {
      itmtostore = itmold;
      const keynotallow = Object.keys(itm).filter(
        (el) => !accessright.U.includes(el)
      );
      if (keynotallow.length > 0) {
        feedbackinfo.keynotallow = keynotallow;
      }
      accessright.U.forEach((p) => {
        itmtostore[p] = itm[p];
      });
      itmtostore.dt_update = dayjs().toISOString();
    }
    Object.keys(itmtostore).forEach((k) => {
      // Manage base64 images: convert and store as a file
      if (k.includes("imgbase64_") && itmtostore[k] != "") {
        fs.ensureDirSync(`${objectPathname}/img/`);
        const imgb64 = itmtostore[k];
        const extension = imgb64.split(";base64,")[0].split("/")[1];
        const newk = k.split("_").slice(1).join("_");
        const filenameimg = `img/${
          itmtostore[getschema.data.schema.apxid]
        }_${newk}.${extension}`;
        if (!getschema.data.schema.properties[newk]) {
          if (log)
            console.log(
              currentmod,
              " Check your schema: a property imgbase64_blabla requires a blabla property to host the image path"
            );
        } else {
          const objectname = objectPathname.split("/").slice(-1)[0];
          itmtostore[newk] = `${objectname}/${filenameimg}`;
          //const webp = require("webp-converter");
          // strip the data-URI prefix before decoding the base64 payload
          const buf = Buffer.from(imgb64.split(";base64,")[1], "base64");
          //const imgwebp = await webp.bufferToWebp(buf);
          fs.writeFileSync(`${objectPathname}/${filenameimg}`, buf);
          itmtostore[k] = "";
        }
      }
    });
    if (crud == "C") itmtostore.dt_create = dayjs().toISOString();
    // check consistency of the data to store
    const chkdata = Checkjson.schema.data(
      getschema.data.schema,
      itmtostore,
      false
    );
    if (chkdata.status != 200) {
      if (log) console.log(currentmod, "Unconsistency data", chkdata);
      return chkdata;
    }
    if (log) console.log(currentmod, "Data compliance with schema");
    if (!getschema.data.schema.apxuniquekey)
      getschema.data.schema.apxuniquekey = [];
    if (log)
      console.log(
        currentmod,
        `${objectPathname}/itm/${chkdata.data.apxid}.json`
      );
    if (log) console.log(currentmod, chkdata.data.itm);
    fs.outputJSONSync(
      `${objectPathname}/itm/${chkdata.data.apxid}.json`,
      chkdata.data.itm
    );
  }
  //if (log) console.log(currentmod,"getschema", getschema);
  // rebuild the index if requested
  if (log) console.log(currentmod, "runidx", runindex);
  if (runindex) Odmdb.runidx(objectPathname, getschema.data.schema);
  getschema.data.conf.lastupdatedata = dayjs().toISOString();
  fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
  return {
    status: 200,
    ref: "Odmdb",
    msg: "cudsuccessfull",
    data: feedbackinfo,
  };
};
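
/*
 * Usage sketch (hypothetical values, assuming the schema grants C to pagans
 * and U to owner): create an item then update it; the update only touches
 * properties allowed by the role's U access right.
 *
 * Odmdb.cud("../../smatchit/objects/persons", "C",
 *   { alias: "toto", firstname: "Paul" },
 *   { xalias: "toto", xprofils: ["pagans"] });
 * Odmdb.cud("../../smatchit/objects/persons", "U",
 *   { alias: "toto", firstname: "Jack" },
 *   { xalias: "toto", xprofils: ["pagans"] });
 */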

/**
 * apxidx: list of index files
 * { name: string, // filename in /idx/name.json
 *   type: array (list of unique values found in keyval), example: [val1, val2, val3,....] eventually filtered
 *         view (only for unique keys; an object built from objkey, eventually filtered) example: {idalias:{itm}}
 *         distribution (=> {val of keyval:[apxid itm value]})
 *   keyval: string, // a property of type string or array
 *   objkeys: [] an array of properties; if empty [] it means all properties, if 1 element => an array of that property
 *   filter: an eval function based on itm data (itm.key) that returns true (keep the item) or false (ignore the item)
 * }
 */
Odmdb.runidx = (objectPathname, schema) => {
  if (log) console.log(currentmod, `idx for ${objectPathname}`);
  if (!schema || !schema.apxid) {
    const getschema = Odmdb.Schema(objectPathname, true);
    if (getschema.status != 200) return getschema;
    schema = getschema.data.schema;
  }
  const ventil = {};
  schema.apxidx.forEach((id) => {
    ventil[id.name] = id;
    if (id.keyval.includes(".")) {
      // means keyval is a dotted path into an object; such an idx can only index strings
      ventil[id.name].isobject = true;
      ventil[id.name].isunique = false;
      ventil[id.name].nbobjkey = 0;
      ventil[id.name].keyvaltype = "string";
      ventil[id.name].filter = id.filter ? id.filter.replace(/;/g, "") : ""; // check the integrity of the string
      ventil[id.name].data = {};
    } else {
      ventil[id.name].isunique = schema.apxuniquekey.includes(id.keyval);
      ventil[id.name].nbobjkey = id.objkey ? id.objkey.length : 0;
      ventil[id.name].keyvaltype = schema.properties[id.keyval].type;
      ventil[id.name].filter = id.filter ? id.filter.replace(/;/g, "") : ""; // check the integrity of the string
      ventil[id.name].data = ventil[id.name].type == "array" ? [] : {};
    }
  });
  glob.sync(`${objectPathname}/itm/*.json`).forEach((i) => {
    const itm = fs.readJSONSync(i);
    Object.keys(ventil).forEach((n) => {
      let keep = true;
      if (ventil[n].filter != "") {
        try {
          keep = eval(ventil[n].filter);
        } catch (err) {
          keep = false;
        }
      }
      if (keep && ventil[n].type == "array" && itm[ventil[n].keyval]) {
        if (ventil[n].keyvaltype == "array") {
          itm[ventil[n].keyval].forEach((v) => {
            if (!ventil[n].data.includes(v)) ventil[n].data.push(v);
          });
        } else {
          if (!ventil[n].data.includes(itm[ventil[n].keyval]))
            ventil[n].data.push(itm[ventil[n].keyval]);
        }
      }
      if (
        keep &&
        ventil[n].type == "view" &&
        ventil[n].isunique &&
        itm[ventil[n].keyval]
      ) {
        if (ventil[n].nbobjkey == 0)
          ventil[n].data[itm[ventil[n].keyval]] = itm;
        if (ventil[n].nbobjkey == 1)
          ventil[n].data[itm[ventil[n].keyval]] = itm[ventil[n].objkey[0]];
        if (ventil[n].nbobjkey > 1) {
          const objdata = {};
          Object.keys(itm).forEach((k) => {
            if (ventil[n].objkey.includes(k)) objdata[k] = itm[k];
          });
          ventil[n].data[itm[ventil[n].keyval]] = objdata;
        }
      }
      if (keep && ventil[n].type == "distribution" && itm[ventil[n].keyval]) {
        const listval =
          ventil[n].keyvaltype == "string"
            ? [itm[ventil[n].keyval]]
            : itm[ventil[n].keyval];
        // itm[ventil[n].keyval] is an array
        listval.forEach((val) => {
          if (!ventil[n].data[val]) ventil[n].data[val] = [];
          ventil[n].data[val].push(itm[schema.apxid]);
        });
      }
      if (
        keep &&
        ventil[n].type == "distribution" &&
        ventil[n].isobject &&
        itm[ventil[n].keyval.split(".")[0]]
      ) {
        let itmval = JSON.parse(JSON.stringify(itm));
        if (log) console.log(currentmod, ventil[n].keyval);
        if (log) console.log(currentmod, itmval);
        ventil[n].keyval
          .split(".")
          .forEach((i) => (itmval = itmval && itmval[i] ? itmval[i] : null));
        if (itmval) {
          if (!ventil[n].data[itmval]) ventil[n].data[itmval] = [];
          ventil[n].data[itmval].push(itm[schema.apxid]);
        }
      }
    });
  });
  Object.keys(ventil).forEach((n) => {
    //if (log) console.log(currentmod,`${objectPathname}/idx/${ventil[n].name}.json`)
    fs.outputJSON(
      `${objectPathname}/idx/${ventil[n].name}.json`,
      ventil[n].data
    );
  });
};
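
/*
 * Illustrative sketch (hypothetical items, assuming alias is the apxid):
 * given two persons itm files
 *   { alias: "toto", nationId: "ants" } and { alias: "titi", nationId: "ants" }
 * and these apxidx definitions, runidx would output:
 *   { name: "lst_alias", type: "array", keyval: "alias" }
 *     => idx/lst_alias.json = ["toto", "titi"]
 *   { name: "all_alias", type: "view", keyval: "alias" }
 *     => idx/all_alias.json = { toto: {itm}, titi: {itm} }
 *   { name: "nationId_alias", type: "distribution", keyval: "nationId" }
 *     => idx/nationId_alias.json = { ants: ["toto", "titi"] }
 */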

/**
 * create/update idx from itm(s)
 *
 * @param {string} objectPathname
 * @param {object} itm item to Create or to Update, or {} if crud == I or crud == D
 * @param {object} itmold (current item) if crud == U or D, to get the previous itm before the change, or {} if crud == I or C
 * @param {letter} crud CUDI: C add, U update, D delete, I reindex
 * @param {array} idx a specific list of idx to rebuild; if [] then use the schema one
 * @param {object} schema if empty it uses the schema from Odmdb.Schema().data.schema
 *
 * example create alias 12 with name fred:
 * Odmdb.ASUPidxfromitm('../../../ndda/persons',"C",{alias:'12',name:"fred"},{},[], {person schema})
 * example update alias 12 with name freddy:
 * Odmdb.ASUPidxfromitm('../../../ndda/persons',"U",{alias:'12',name:"freddy"},{alias:'12',name:"fred"},[], {person schema})
 * example delete alias 12:
 * Odmdb.ASUPidxfromitm('../../../ndda/persons',"D",{},{alias:'12',name:"fred"},[], {person schema})
 * example rebuild all indexes from scratch:
 * Odmdb.ASUPidxfromitm('../../../ndda/persons',"I",{},{},[], {person schema})
 * example rebuild only the publickey_alias index from scratch:
 * Odmdb.ASUPidxfromitm('../../../ndda/pagans',"I",{},{},[{ name:"publickey_alias",keyval:"publickey",objkey:["alias"]}], {pagans schema})
 */
Odmdb.ASUPidxfromitm = (
  objectPathname,
  crud,
  itm,
  itmold,
  idxs = [],
  schema
) => {
  if (log)
    console.log(currentmod, `idxfromitem for ${objectPathname} action:${crud}`);
  if (!schema || !schema.apxid) {
    const getschema = Odmdb.Schema(objectPathname, true);
    if (getschema.status != 200) return getschema;
    schema = getschema.data.schema;
  }
  if (log) console.log(currentmod, schema.apxuniquekey);
  const itms = crud == "I" ? glob.sync(`${objectPathname}/itm/*.json`) : [itm];
  if (log) console.log(currentmod, itms);
  if (crud == "I") {
    // reinit all idx
    idxs.forEach((idx) => {
      fs.remove(`${objectPathname}/idx/${idx.name}.json`);
    });
  }
  let idxtoreindex = []; // store indexes that have to be reprocessed to get the full context
  idxs = idxs.length == 0 ? schema.apxidx : idxs; // get all indexes if none
  itms.forEach((i) => {
    if (crud == "I") {
      itm = fs.readJSONSync(i);
    }
    //if (log) console.log(currentmod,itm);
    idxs.forEach((idx) => {
      const keyvalisunique = schema.apxuniquekey.includes(idx.keyval); // a unique keyval is stored as an object (or string), otherwise as an array
      const idxsrc = `${objectPathname}/idx/${idx.name}.json`;
      const idxinit = idx.name.substring(0, 4) == "lst_" ? [] : {}; // select the type of idx (array or object)
      let idxfile = !fs.existsSync(idxsrc) ? idxinit : fs.readJSONSync(idxsrc);
      if (idx.name.substring(0, 4) == "lst_") {
        if (["D", "U"].includes(crud)) {
          if (keyvalisunique) {
            idxfile = idxfile.filter((e) => e !== itmold[idx.keyval]);
          } else {
            idxtoreindex.push(idx); //@todo
          }
        }
        if (log) console.log(currentmod, idx.keyval);
        if (log) console.log(currentmod, itm[idx.keyval]);

        if (
          ["C", "U", "I"].includes(crud) &&
          !idxfile.includes(itm[idx.keyval])
        ) {
          idxfile.push(itm[idx.keyval]);
        }
      } else {
        if (!idx.objkey) {
          // means all properties
          idx.objkey = Object.keys(schema.properties);
        }
        if (keyvalisunique && idx.objkey.length == 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            idxfile[itm[idx.keyval]] = itm[idx.objkey[0]];
          }
        }
        if (keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            idxfile[itm[idx.keyval]] = itmfilter;
          }
        }
        if (!keyvalisunique && idx.objkey.length == 1) {
          if (
            ["D", "U"].includes(crud) &&
            idxfile[itmold[idx.keyval]] &&
            idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]) > -1
          ) {
            // U as well, because the previous value must be removed before adding the new one
            idxfile[itmold[idx.keyval]].splice(
              idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]),
              1
            );
          }
          if (["C", "U", "I"].includes(crud)) {
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            if (!idxfile[itm[idx.keyval]].includes(itm[idx.objkey[0]])) {
              idxfile[itm[idx.keyval]].push(itm[idx.objkey[0]]);
            }
          }
        }
        if (!keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud) && idxfile[itmold[idx.keyval]]) {
            // U as well, because the previous value must be removed before adding the new one
            let arrayofit = [];
            idxfile[itmold[idx.keyval]].forEach((it) => {
              if (it[schema.apxid] != itm[schema.apxid]) arrayofit.push(it);
            });
            idxfile[itmold[idx.keyval]] = arrayofit;
          }
          if (["C", "U", "I"].includes(crud)) {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            idxfile[itm[idx.keyval]].push(itmfilter);
          }
        }
      }
      fs.outputJSONSync(idxsrc, idxfile);
    });
  });
  if (crud != "I") {
    // update lastupdatedata to signal that something changed
    const confschema = fs.readJSONSync(`${objectPathname}/conf.json`);
    confschema.lastupdatedata = dayjs().toISOString();
    fs.outputJSONSync(`${objectPathname}/conf.json`, confschema);
  }
  return { status: 200, ref: "Odmdb", msg: "successreindex", data: {} };
};

Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
  /**
   * Update all itm of objectname from the index idx/idxname with data
   * If an itm is fresher locally, or exists locally but not in data, then
   * /objectname/conf.json lastupdate = now, otherwise lastupdate = the provided lastupdate.
   * This way, the next time server A wants to refresh from B, its lastupdate
   * shows that A holds changes B does not have yet.
   */
  let conflastupdate = 0;
  let localidx = {};
  if (fs.existsSync(`../nationchains/${objectname}/idx/${idxname}`)) {
    localidx = fs.readJsonSync(`../nationchains/${objectname}/idx/${idxname}`);
  }
  Object.keys(data).forEach((id) => {
    if (localidx[id]) {
      if (
        localidx[id].dt_update &&
        data[id].dt_update &&
        localidx[id].dt_update > data[id].dt_update
      ) {
        // means the local information is fresher than the one in data, keep it
        // .toISOString ex: 2019-01-25T02:00:00.000Z
        conflastupdate = dayjs().toISOString();
      } else {
        // replace itm with data
        localidx[id] = data[id];
        fs.outputJsonSync(
          `../nationchains/${objectname}/itm/${id}.json`,
          data[id]
        );
      }
    } else {
      // add itm
      localidx[id] = data[id];
      fs.outputJsonSync(
        `../nationchains/${objectname}/itm/${id}.json`,
        data[id]
      );
    }
  });
  // check whether an id is missing from the fresher update; if so conf.lastupdate will be now to indicate it
  Object.keys(localidx).forEach((id) => {
    if (!data[id]) {
      conflastupdate = dayjs().toISOString();
    }
  });
  // update the object files
  if (conflastupdate == 0) conflastupdate = lastupdate;
  fs.outputJSONSync(`../nationchains/${objectname}/idx/${idxname}`, localidx);
  const objconf = fs.readJsonSync(`../nationchains/${objectname}/conf.json`);
  objconf.lastupdate = conflastupdate;
  fs.outputJsonSync(`../nationchains/${objectname}/conf.json`, objconf);
  return {
    status: 200,
    ref: "Odmdb.js",
    info: "Successfullupdate",
    data: { objectname, idxname, lastupdate },
  };
};
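
/*
 * Usage sketch (hypothetical names, paths resolve under ../nationchains):
 * server A refreshes its local pagans object from an all_alias index
 * downloaded from server B.
 *
 * const remote = { toto: { alias: "toto", dt_update: "2024-05-01T00:00:00.000Z" } };
 * Odmdb.updatefromidxall("pagans", "all_alias.json", remote, "2024-05-01T00:00:00.000Z");
 */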

module.exports = Odmdb;