2023-04-13 07:46:35 +02:00
|
|
|
const glob = require("glob");
|
|
|
|
const path = require("path");
|
|
|
|
const fs = require("fs-extra");
|
2023-05-31 15:19:21 +02:00
|
|
|
const dayjs = require("dayjs");
|
2023-06-28 15:23:17 +02:00
|
|
|
const axios = require("axios");
|
2023-05-31 15:19:21 +02:00
|
|
|
const conf = require(`${process.env.dirtown}/conf.json`);
|
2023-04-13 07:46:35 +02:00
|
|
|
const Checkjson = require(`./Checkjson.js`);
|
|
|
|
|
|
|
|
/* This manages Objects: indexing, consistency checks, and CRUD actions

objectpath/objects/schema/objectName.json
          /objectNames/searchindex/objectName_valueofkey_uuidlist.json
          /objectNames/uuid.json

*/
|
|
|
|
const Odmdb = {};
|
|
|
|
/*
Input: metaobject => data mapper of Key: Value

objname + an object {} + action Checkjson => get a valid or not answer
objname + an object {} + action search => apply matching algo to find a probabilistic object id
objname + action index => update /searchindex of the objects concerned
*/
|
2023-05-31 15:19:21 +02:00
|
|
|
|
|
|
|
Odmdb.setObject = (schemaPath, objectPath, objectName, schema, lgjson, lg) => {
  /**
   * Create or replace the environment of an object: its schema file, an
   * optional language file, and the index configuration pointing to the schema.
   *
   * @schemapath {string} path to create or replace a schema ${schemaPath}/schema/
   * @objectPath {string} path where objects are stored
   * @objectName {string} name of the object
   * @schema {object} the json schema for this object (skipped when empty/undefined)
   * @lgjson {object} the json file for a specific language (skipped when empty/undefined)
   * @lg {string} the 2 letters language
   *
   * a schema :
   *   schemaPath/schema/objectName.json
   *             /lg/objectName_{lg}.json
   * an object :
   *   objectPath/objectName/idx/confjson ={"schema":"relativpathfile or http"}
   *                        /uniqueid.json defining schema
   */
  // both roots must already exist; report the first missing one
  for (const fullpath of [schemaPath, objectPath]) {
    if (!fs.existsSync(fullpath)) {
      return {
        status: 404,
        ref: "Odmdb",
        info: "pathnamedoesnotexist",
        moreinfo: { fullpath },
      };
    }
  }
  // true when o is a plain object with no keys (i.e. {})
  const isEmptyPlainObject = (o) =>
    Object.keys(o).length === 0 && o.constructor === Object;
  // store schema file unless it is undefined, empty, or {}
  if (schema && !isEmptyPlainObject(schema)) {
    fs.outputJSONSync(`${schemaPath}/schema/${objectName}.json`, schema, {
      spaces: 2,
    });
  }
  // store the language file only when both a language code and content exist
  if (lgjson && lg && !isEmptyPlainObject(lgjson)) {
    fs.outputJSONSync(`${schemaPath}/lg/${objectName}_${lg}.json`, lgjson, {
      spaces: 2,
    });
  }
  // create the object environment with the new schema config (first time only)
  if (!fs.existsSync(`${objectPath}/${objectName}`)) {
    fs.outputJsonSync(
      `${objectPath}/${objectName}/idx/confjson`,
      { schema: `${schemaPath}/schema/${objectName}.json` },
      { spaces: 2 }
    );
  }
  return { status: 200 };
};
|
2023-04-13 07:46:35 +02:00
|
|
|
|
|
|
|
Odmdb.schema = (schemaPath, objectName, withschemacheck) => {
  /**
   * Return the schema if it exists and schemaPath contains objectName.
   *
   * @schemaPath {string} root folder containing /${objectName} and /schema/${objectName}.json
   * @objectName {string} name of the object
   * @withschemacheck {boolean} when true, also validate apx specificities
   *   (primary key, unique keys, searchindex) and the json schema itself
   * @return {status:200, data:schema} or {status:404|422, info, moreinfo}
   */
  if (!fs.existsSync(`${schemaPath}/${objectName}`))
    return {
      status: 404,
      info: "|odmdb|schemapathnamedoesnotexist",
      moreinfo: `${schemaPath}/${objectName}`,
    };
  // fix: this test used `${objectPath}/...` but objectPath is not defined in
  // this function's scope (ReferenceError) — the schema lives under schemaPath
  if (!fs.existsSync(`${schemaPath}/schema/${objectName}.json`)) {
    return {
      status: 404,
      info: `|odmdb|schemanotfound`,
      moreinfo: `file not found ${schemaPath}/schema/${objectName}.json`,
    };
  }
  const schema = fs.readJsonSync(`${schemaPath}/schema/${objectName}.json`);
  // check schema apx validity specificities: primary, unique and searchindex
  if (withschemacheck) {
    if (!schema.apxprimarykey) {
      // code 422: unprocessable Content
      return {
        status: 422,
        info: "|Odmdb|apxprimarykeynotfound",
        moreinfo: `${schemaPath}/schema/${objectName}.json`,
      };
    }
    // the primary key must be indexed as a list
    if (
      !(
        schema.apxsearchindex[schema.apxprimarykey] &&
        schema.apxsearchindex[schema.apxprimarykey].list
      )
    ) {
      return {
        status: 422,
        info: "|Odmdb|apxprimaryketnotinsearchindexlist",
        moreinfo: `${schemaPath}/schema/${objectName}.json`,
      };
    }
    if (schema.apxuniquekey) {
      // fix: this used forEach whose callback `return` was silently discarded,
      // so the 422 was never reported — for...of lets the return propagate
      for (const k of schema.apxuniquekey) {
        if (
          !(
            schema.apxsearchindex[k] &&
            schema.apxsearchindex[k][schema.apxprimarykey]
          )
        ) {
          return {
            status: 422,
            info: "|Odmdb|apxuniquekeynotinsearchindex",
            moreinfo: `${schemaPath}/schema/${objectName}.json`,
          };
        }
      }
    }
    const validschema = Checkjson.schema.validation(schema);
    if (validschema.status != 200) return validschema;
  }
  return {
    status: 200,
    data: schema,
  };
};
|
|
|
|
|
2023-06-28 15:23:17 +02:00
|
|
|
//Odmdb.Checkjson = (objectPath, objectName, data, withschemacheck) => {
|
2023-04-13 07:46:35 +02:00
|
|
|
/*
|
|
|
|
@objectPath path to the folder that contain /objects/objectName/ /lg/objectName_{lg}.json /schema/objectName.json
|
|
|
|
@objectName name of object
|
|
|
|
@data data to check based on schema objectName definition
|
|
|
|
|
|
|
|
@return status:200 Data is consistent with schema and primarykey does not exist
|
|
|
|
status:201 Data is consistent with schema and primarykey does already exist
|
|
|
|
status:other means unconsistent schema:
|
|
|
|
404: schema does not exist
|
|
|
|
or unconsitent data and schema from Checkjson.js Checkjson.schema.data
|
|
|
|
|
|
|
|
*/
|
2023-06-28 15:23:17 +02:00
|
|
|
/* const res = { status: 200,ref="Odmdb",msg:"",data:{} };
|
2023-04-13 07:46:35 +02:00
|
|
|
//get schema link of object
|
2023-05-31 15:19:21 +02:00
|
|
|
const schemaPath = fs.readJsonSync(
|
2023-06-28 15:23:17 +02:00
|
|
|
`${objectPath}/${objectName}/idx/conf.json`
|
2023-05-31 15:19:21 +02:00
|
|
|
)["schema"];
|
|
|
|
if (schemaPath.substring(0, 4) == "http") {
|
|
|
|
// lance requete http pour recuperer le schema
|
|
|
|
} else {
|
2023-06-28 15:23:17 +02:00
|
|
|
res.data.schema = Odmdb.schema(objectPath, objectName, withschemacheck);
|
2023-05-31 15:19:21 +02:00
|
|
|
}
|
2023-06-28 15:23:17 +02:00
|
|
|
// check schema validity in case withschemacheck
|
|
|
|
if (schema.status != 200) return ;
|
2023-04-13 07:46:35 +02:00
|
|
|
console.log("SCHEMA for checking:");
|
|
|
|
console.log(schema.data);
|
|
|
|
console.log("DATA to check:");
|
|
|
|
console.log(data);
|
|
|
|
// withschemacheck at false, if check then it is done at Odmdb.schema
|
|
|
|
const validate = Checkjson.schema.data(schema.data, data, false);
|
|
|
|
if (validate.status != 200) {
|
|
|
|
return validate;
|
|
|
|
}
|
|
|
|
if (
|
|
|
|
schema.data.apxprimarykey &&
|
|
|
|
data[k] &&
|
|
|
|
fs.existsSync(`${objectPath}/${objectName}/${data[k]}.json}`)
|
|
|
|
) {
|
|
|
|
res.status = 201; // means created => exist an object with this primary key
|
|
|
|
}
|
|
|
|
if (schema.data.apxuniquekey) {
|
|
|
|
schema.data.apxuniquekey.forEach((k) => {
|
|
|
|
if (
|
|
|
|
data[k] &&
|
|
|
|
fs.existsSync(
|
|
|
|
`${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json}`
|
|
|
|
) &&
|
|
|
|
fs.readJsonSync(
|
|
|
|
`${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json}`
|
|
|
|
)[k]
|
|
|
|
) {
|
|
|
|
res.status = 201; // means created => exist as primary key
|
|
|
|
}
|
|
|
|
});
|
|
|
|
}
|
|
|
|
return res;
|
|
|
|
};
|
2023-06-28 15:23:17 +02:00
|
|
|
*/
|
|
|
|
Odmdb.getSchema = async (schemaPath, validschema = true) => {
  /**
   * Load a schema from a public http link or a local path.
   *
   * @schemaPath {string} public http link, or local path
   *   adminapi/schema/objectName[.json] or /tribename/schema/objectName[.json]
   * @validschema {boolean} run Checkjson + Odmdb validation (default true)
   * @return {status, data:{schema}} — schema is {} when not found
   */
  const res = { status: 200, data: { schema: {} } };
  if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
  if (schemaPath.substring(0, 4) == "http") {
    // @todo fetch the schema with an await axios.get(schemaPath)
  } else {
    // resolve the relative schema path against the api or tribe directory
    if (schemaPath.substring(0, 9) == "adminapi/") {
      schemaPath = `${conf.dirapi}/${schemaPath}`;
    } else {
      schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
    }
    if (!fs.existsSync(schemaPath)) {
      return {
        status: 404,
        ref: "Odmdb",
        msg: "schemanotfound",
        data: { schemaPath, schema: {} },
      };
    }
    res.data.schema = fs.readJsonSync(schemaPath);
    // fix: was `if (validschema || 1==1)` — a debug leftover forcing validation
    // on every call; the `= true` default keeps the always-validate behavior
    // for existing callers while honoring an explicit false.
    if (validschema) {
      const check = Checkjson.schema.validation(res.data.schema);
      if (check.err.length > 0) {
        res.status = check.status;
        res.data.err = check.err;
      }
      // check json schema for Odmdb context: a primary key is required and
      // must be declared among the schema properties
      const pk = res.data.schema.apxprimarykey;
      if (!pk || !(res.data.schema.properties || {})[pk]) {
        res.status = 406;
        if (!res.data.err) res.data.err = [];
        res.data.err.push({
          ref: "Odmdb",
          msg: "novalidprimarykey",
          data: { apxprimarykey: pk },
        });
      }
    }
  }
  return res;
};
|
2023-04-13 07:46:35 +02:00
|
|
|
Odmdb.search = (objectPath, objectName, search) => {
  /*
  Search object uuids matching a text with a given algorithm.

  @search= {
    txt: string,
    algo: match | pattern | fuzzy
    fieldstring:[list of field],
    indexfilter:{index1:[val1,val2 | ] }
  }
  Return data:[uuids]

  example: search exact match hill in townId
  heavy search={txt:"hill",algo:"match",fieldstring:"townId"}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"townId","value":[]}}
  light search={txt:"hill", algo:"match", indexfilter:{"key":"nationId","value":"ants"}}

  NOTE(review): unfinished stub — it returns undefined when the schema loads
  successfully (only the schema error path returns a value). It also passes
  objectPath as the first argument of Odmdb.schema, which expects schemaPath;
  confirm the intended root folder before implementing the matching algo.
  */
  const schema = Odmdb.schema(objectPath, objectName);
  if (schema.status != 200) return schema;
};
|
|
|
|
Odmdb.get = (objectPath, objectName, uuidprimarykeyList, fieldList) => {
  /**
   * Collect items by primary key, optionally filtered to a subset of fields.
   *
   * @objectPath {string} where objects are stored (items live under
   *   ${objectPath}/${objectName}/itm/${id}.json)
   * @objectName {string} name of the object
   * @uuidprimarykeyList {array} list of uuid requested
   * @fieldList {array} keys to return for each object; when falsy the whole
   *   item is returned
   * @return {status:200, data:{uuid:{data filtered by @fieldList},uuid:"notfound"}}
   */
  const res = { status: 200, data: {} };
  uuidprimarykeyList.forEach((id) => {
    const itmPath = `${objectPath}/${objectName}/itm/${id}.json`;
    if (!fs.existsSync(itmPath)) {
      res.data[id] = "notfound";
      return;
    }
    const objectdata = fs.readJsonSync(itmPath);
    if (!fieldList) {
      res.data[id] = objectdata;
    } else {
      const objinfo = {};
      // fix: was `fieldlList.forEach` (typo) which threw a ReferenceError
      // whenever a fieldList was provided
      fieldList.forEach((k) => {
        // note: fields with falsy values (0, "", false) are skipped, as before
        if (objectdata[k]) objinfo[k] = objectdata[k];
      });
      res.data[id] = objinfo;
    }
  });
  return res;
};
|
2023-06-28 15:23:17 +02:00
|
|
|
Odmdb.create = (objectPath, objectName, data, accessright) => {
  /*
  @todo not implemented yet — empty stub.
  Create an object item of objectName from data.
  @objectPath path to the folder that contain /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of object
  @data data to check based on objectsMeta definition
  @accessright a string with accessright of the user on this objectName ex: "CRUDO" or "R" or "O"
  */
};
|
2023-06-28 15:23:17 +02:00
|
|
|
Odmdb.update = async (objectPath, objectName, data, id, accessright) => {
  /**
   * Update item @id of @objectName by merging @data keys into the stored item,
   * then re-validating the merged object against the object's schema.
   *
   * @objectPath {string} folder containing /${objectName}/itm/ and /${objectName}/conf.json
   * @objectName {string} name of object
   * @data {object} keys to overwrite in the stored item
   * @id {string} primary key value of the item to update
   * @accessright {string} user access right — NOTE(review): not enforced here yet
   * @return {status:200|404|409, ref, msg, data?}
   */
  const itmPath = `${objectPath}/${objectName}/itm/${id}.json`;
  if (!fs.existsSync(itmPath)) {
    return {
      status: 404,
      ref: "Odmdb",
      msg: "itmnotfound",
      data: { objectPath, objectName, id },
    };
  }
  const currentobj = fs.readJSONSync(itmPath);
  // shallow merge: each provided key replaces the stored value
  Object.keys(data).forEach((k) => {
    currentobj[k] = data[k];
  });
  // refresh the update timestamp only when the item tracks one
  if (currentobj.dt_update) currentobj.dt_update = dayjs().toISOString();
  const schemaPath = fs.readJsonSync(`${objectPath}/${objectName}/conf.json`)[
    "schema"
  ];
  const getschema = await Odmdb.getSchema(schemaPath);
  if (getschema.status != 200 || Object.keys(getschema.data.schema).length == 0) {
    // an existing item without a loadable schema means broken conf — surface it
    console.log(
      "this is not suppose to happen in Odmdb",
      Object.keys(getschema.data.schema)
    );
    return getschema;
  }
  const schema = getschema.data.schema;
  // withschemacheck false: the schema itself was already validated by getSchema
  // fix: removed stray debug `console.log(check)` left over from development
  const check = Checkjson.schema.data(schema, currentobj, false);
  if (check.err.length > 0) {
    return {
      status: 409,
      ref: "Odmdb",
      msg: "datavsschemaunconsistent",
      data: check.err,
    };
  }
  fs.outputJsonSync(itmPath, currentobj);
  //@todo select index file to generate depending of k update currently we re-index all
  return { status: 200, ref: "Odmdb", msg: "updatesuccessfull" };
};
|
2023-06-28 15:23:17 +02:00
|
|
|
Odmdb.delete = (objectPath, objectName, data,accessright) => {
  /*
  @todo not implemented yet — empty stub (the original comment said "Create",
  a copy-paste leftover; this is the delete entry point).
  Delete an object item of objectName identified from data.
  @objectPath path to the folder that contain /objects/objectName/ /objectsInfo/objectName_lg.json /objectsMeta/objectName.json
  @objectName name of object
  @data data identifying the item(s) to delete based on objectsMeta definition
  @accessright a string with accessright of the user on this objectName ex: "CRUDO" or "R" or "O"
  */
};
|
|
|
|
/*console.log("test Odmdb");
|
|
|
|
console.log(
|
|
|
|
Odmdb.check(
|
|
|
|
"/media/phil/usbfarm/apxtrib/nationchains/socialworld/objects",
|
|
|
|
"nations",
|
|
|
|
{ nationId: "123", status: "unchain" }
|
|
|
|
)
|
|
|
|
);*/
|
2023-05-31 15:19:21 +02:00
|
|
|
|
|
|
|
Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
  /**
   * Update all itm of objectname from index idx/idxname with data.
   *
   * If a local itm is fresher than (or missing from) data then
   * /objectname/conf.json.lastupdate = now, otherwise = @lastupdate.
   * This way next time server A wants to refresh from B, its lastupdate is
   * lower than on A.
   *
   * @objectname {string} object folder under ${conf.dirapi}/nationchains/
   * @idxname {string} index file name under idx/
   * @data {object} id => item map coming from the remote server
   * @lastupdate the remote lastupdate to store when nothing local is fresher
   */
  let conflastupdate = 0;
  let localidx = {};
  const idxPath = `${conf.dirapi}/nationchains/${objectname}/idx/${idxname}`;
  if (fs.existsSync(idxPath)) {
    localidx = fs.readJsonSync(idxPath);
  }
  // add or replace one itm (index entry + item file) from incoming data
  const storeitm = (id) => {
    localidx[id] = data[id];
    fs.outputJsonSync(
      `${conf.dirapi}/nationchains/${objectname}/itm/${id}.json`,
      data[id]
    );
  };
  Object.keys(data).forEach((id) => {
    if (
      localidx[id] &&
      localidx[id].dt_update &&
      data[id].dt_update &&
      localidx[id].dt_update > data[id].dt_update
    ) {
      // local information is fresher than the one in data: keep local and
      // flag conf lastupdate to now (.toISOString ex: 2019-01-25T02:00:00.000Z)
      conflastupdate = dayjs().toISOString();
    } else {
      // add itm, or replace itm with data (also when dt_update is missing)
      storeitm(id);
    }
  });
  // an id present locally but missing in the fresher update also means local
  // is ahead: conf.lastupdate must become now to signal it
  Object.keys(localidx).forEach((id) => {
    if (!data[id]) {
      // fix: was `dayjs()` (a Dayjs object) while the other branch stored an
      // ISO string — conf.json.lastupdate now always gets a comparable string
      conflastupdate = dayjs().toISOString();
    }
  });
  // update the object files
  if (conflastupdate == 0) conflastupdate = lastupdate;
  fs.outputJSONSync(idxPath, localidx);
  const objconf = fs.readJsonSync(
    `${conf.dirapi}/nationchains/${objectname}/conf.json`
  );
  objconf.lastupdate = conflastupdate;
  fs.outputJsonSync(
    `${conf.dirapi}/nationchains/${objectname}/conf.json`,
    objconf
  );
  return {
    status: 200,
    ref: "Odmdb.js",
    info: "Successfullupdate",
    data: { objectname, idxname, lastupdate },
  };
};
|
2023-04-13 07:46:35 +02:00
|
|
|
module.exports = Odmdb;
|