This commit is contained in:
2023-12-05 07:42:35 +01:00
parent 5997ab759a
commit 763034e077
30 changed files with 1484 additions and 2066 deletions

View File

@@ -12,6 +12,7 @@ Checkjson.schema.properties = {};
Checkjson.schema.properties.type = {};
Checkjson.schema.properties.type.string = (str) => typeof str === "string";
Checkjson.schema.properties.type.array = (val) => Array.isArray(val);
Checkjson.schema.properties.type.object = (val) => typeof val === 'object' && val !== null && !Array.isArray(val);
Checkjson.schema.properties.type.number = (n) => typeof n === "number";
Checkjson.schema.properties.type.boolean = (n) => typeof n === "boolean";
Checkjson.schema.properties.type.integer = (n) =>
@@ -130,18 +131,25 @@ Checkjson.schema.validation = (schema) => {
}
if (
properties[p].type &&
typeof properties[p].type === "object" &&
Array.isArray(properties[p].type)
) {
properties[p].type.forEach((tp) => {
if (!Checkjson.schema.properties.type[tp])
typeof properties[p].type === "object"){
if (properties[p]['$ref']){
//This is manage by Odmdb.schema to load recursively complex schema
multimsg.push({
ref: "Checkjson",
msg: "schemaerrtypedoesnotexist",
data: { propertie: p, type: properties[p].type },
});
});
}
msg: "externalrefnotload",
data: { propertie: p, ref: properties[p]["$ref"]},
});
}
//case type=="object" with properties
if (properties[p].properties){
const checksub = Checkjson.schema.validation(properties[p])
if (checksub.status!=200){
multimsg = multimsg.concat(checksub.multimsg)
}
}
// if not $ref or no properties then any object is accepted
}
if (
properties[p].format &&
!Checkjson.schema.properties.format[properties[p].format]
@@ -189,40 +197,38 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
console.log('---------')
console.log(data)
*/
if (withschemacheck) {
const validschema = Checkjson.schema.validation(schema);
if (validschema.status != 200) return validschema;
}
const multimsg = [];
const res = {
status: 200,
data: { itm: data },
};
if (schema.properties) {
const properties = schema.properties;
const propertiescheck=(properties,subdata)=>{
// properties ={prop1:{type,format},prop2:{type:object,...}}
// subdata={prop1,prop2}
// Return [] => no error, else 1 item per error {msg,ref:checkjson,data}
let multimsg=[]
Object.keys(properties).forEach((p) => {
//type is mandatory in a propertie
if (data[p]) {
if (subdata[p]) {
if (properties[p].properties){
//means it is a subobject
multimsg=multimsg.concat(propertiescheck(properties[p].properties,subdata[p]))
}
//type can be a list of string; number, array, boolean, object, null
const typlist =
properties[p].type && typeof properties[p].type === "string"
? [properties[p].type]
: properties[p].type;
let valid = false;
typlist.forEach((typ) => {
// at least one test have to be ok
if (Checkjson.schema.properties.type[typ](data[p])) valid = true;
// at least one test have to be valid
if (Checkjson.schema.properties.type[typ](subdata[p])) valid = true;
});
if (!valid)
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: { key: p, value: data[p] },
data: { key: p, value: subdata[p] },
});
if (
properties[p].minLength &&
!Checkjson.schema.properties.minLength(
data[p],
subdata[p],
properties[p].minLength
)
) {
@@ -231,7 +237,7 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
msg: "dataerrpropertie",
data: {
key: p,
value: data[p],
value: subdata[p],
minLength: properties[p].minLength,
},
});
@@ -239,7 +245,7 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
if (
properties[p].maxLength &&
!Checkjson.schema.properties.maxLength(
data[p],
subdata[p],
properties[p].maxLength
)
) {
@@ -248,7 +254,7 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
msg: "dataerrpropertie",
data: {
key: p,
value: data[p],
value: subdata[p],
maxLength: properties[p].maxLength,
},
});
@@ -256,7 +262,7 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
if (
properties[p].multipleOf &&
!Checkjson.schema.properties.multipleOf(
data[p],
subdata[p],
properties[p].multipleOf
)
) {
@@ -265,7 +271,7 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
msg: "dataerrpropertie",
data: {
key: p,
value: data[p],
value: subdata[p],
multipleOf: properties[p].multipleOf,
},
});
@@ -279,7 +285,7 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
// test range
if (
!Checkjson.schema.properties.range(
data[p],
subdata[p],
properties[p].minimum,
properties[p].exclusiveMinimum,
properties[p].maximum,
@@ -291,7 +297,7 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
msg: "dataerrpropertie",
data: {
key: p,
value: data[p],
value: subdata[p],
minimum: properties[p].minimum,
maximum: properties[p].maximum,
exclusiveMinimum: properties[p].exclusiveMinimum,
@@ -302,12 +308,12 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
}
if (
properties[p].enum &&
!Checkjson.schema.properties.enum(data[p], properties[p].enum)
!Checkjson.schema.properties.enum(subdata[p], properties[p].enum)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: { key: p, value: data[p], enumlst: properties[p].enum },
data: { key: p, value: subdata[p], enumlst: properties[p].enum },
});
}
if (properties[p].format) {
@@ -316,12 +322,12 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
}
if (
properties[p].pattern &&
!Checkjson.schema.properties.pattern(data[p], properties[p].pattern)
!Checkjson.schema.properties.pattern(subdata[p], properties[p].pattern)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: { key: p, value: data[p], pattern: properties[p].pattern },
data: { key: p, value: subdata[p], pattern: properties[p].pattern },
});
}
} else if (schema.required && schema.required.includes(p)) {
@@ -332,18 +338,27 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
});
}
});
} //end properties
if (schema.apxid) {
res.data.apxid = data[schema.apxid];
return multimsg
};//end propertiescheck()
if (withschemacheck) {
const validschema = Checkjson.schema.validation(schema);
if (validschema.status != 200) return validschema;
}
if (multimsg.length > 0) {
let multi=propertiescheck(schema.properties,data)
const res = {};
if (multi.length > 0) {
res.status = 417;
res.multimsg = multimsg;
res.multimsg = multi;
} else {
res.status = 200;
res.ref = "Checkjson";
res.msg = "validcheck";
}
if (schema.apxid) {
res.data={apxid : data[schema.apxid],itm:data};
}
return res;
};
if (typeof module !== "undefined") module.exports = Checkjson;

View File

@@ -113,6 +113,7 @@ Nations.updateobjectsfromfreshesttown = (dnstownlist, objectidx) => {
});
}
}
return {status:200,ref:"Nations",msg:"updated",data:{}};
};
Nations.synchronizeold = () => {
/*

View File

@@ -11,29 +11,38 @@ const conf = require(`${process.env.dirtown}/conf.json`);
* volatile notification message from tribe activities to Pagans / person ()
*
*/
const Notifications = {};
Notifications.get = (alias, tribeId) => {
const notiffile = `${conf.dirtown}/tribes/${req.params.tribeId}/notifications/${req.params.alias}.json`;
const msg = fs.existsSync(notiffile) ? fs.readJSONSync(notiffile) : {};
return {
status: 200,
ref: "Notification",
msg: "Messagelist",
data: { notif: [{ tribeId, msg }] },
};
};
Notifications.sendsms = async (data, tribeId) => {
/**
* Never use need wallet in mailjet to test
* To set up with mailjet see https://dev.mailjet.com/sms/guides/send-sms-api/#authentication
*
* @param {string} data.To a phone number with international +3360101010101
* @param {string} data.Text text to send
*
* a conf.sms with {url:"smsurl", Token:"", From:""}
*
*
*/
/**
* Never use need wallet in mailjet to test
* To set up with mailjet see https://dev.mailjet.com/sms/guides/send-sms-api/#authentication
*
* @param {string} data.To a phone number with international +3360101010101
* @param {string} data.Text text to send
*
* a conf.sms with {url:"smsurl", Token:"", From:""}
*
*
*/
if (!conf.sms) {
return {
status: 412,
ref: "Notifications",
msg: "missingconf",
tribe: tribeId,
data: { tribe: tribeId },
};
}
let missingk = [][("To", "Text")].forEach((k) => {
@@ -46,10 +55,10 @@ Notifications.sendsms = async (data, tribeId) => {
status: 428,
ref: "Notifications",
msg: "missingdata",
missingk: missingk,
data: { missingk: missingk },
};
}
let confsms= conf.sms;
let confsms = conf.sms;
if (
fs.existsSync(
`${process.env.dirtown}/tribes/itm/${req.session.header.xtribe}.json`
@@ -60,19 +69,28 @@ Notifications.sendsms = async (data, tribeId) => {
);
if (conftrib.sms) confsms = conftrib.sms;
}
data.From=confsms.From
const sendsms= await axios.post(confsms.url,
{
data.From = confsms.From;
const sendsms = await axios.post(confsms.url, {
headers: {
Authorization: `Bearer ${confsms.MJ_TOKEN}`,
"Content-Type": "application/json",
},
body: JSON.stringify(data),
});
if (sendsms.status==200){
return {status:200,ref:"Notifications",msg:"successfullsentsms"};
}else{
return {status:sendsms.status, ref:"Notifications",msg:"errsendsms",err:sendsms.data}
if (sendsms.status == 200) {
return {
status: 200,
ref: "Notifications",
msg: "successfullsentsms",
data: {},
};
} else {
return {
status: sendsms.status,
ref: "Notifications",
msg: "errsendsms",
data: { err: sendsms.data },
};
}
/* si tout se passe bien:
@@ -100,36 +118,39 @@ Notifications.sendsms = async (data, tribeId) => {
Notifications.sendmail = async (data, tribe) => {
/**
* @param {string} data.From an email authorized by smtp used priority from header xtribe
* @param {string} data.To list of email separate by ,
* @param {string} [data.from] an email authorized by smtp used priority from header xtribe
* @param {string} data.to list of email separate by ,
* @param {string} data.subject
* @param {string} data.html
* @param {string} data.text
* @param {string} data.Cc list of email in copy
* @param {string} data.Bcc list of email in hidden copy
* @param {string} data.filelist an array of object {filename:"",pathfile:"",filetype:""} pathfile to attach as file name of type:filetype "filename" to this email
* @param {string} [data.Cc] list of email in copy
* @param {string} [data.Bcc] list of email in hidden copy
* @param {string} [data.filelist} an array of object {filename:"",pathfile:"",filetype:""} pathfile to attach as file name of type:filetype "filename" to this email
* example of filetype : "text/plain", "text/csv", image/gif", "application/json", "application/zip"
*
* @example data
* {"From":"wall-ants.ndda.fr",
* "To":"wall-ants.ndda.fr",
* {"to":"wall-ants.ndda.fr",
* "subject":"Test",
* "html":"<h1>test welcome</h1>",
* "text":"test welcome",
* "attachments":[{filename:"text.txt",pathfile:"/media/phil/textA.txt","contenttype":"text/plain"}]
* }
* @return {object} { status: 200, ref:"pagans",msg:"aliasexist",data: { alias, publicKey } }
* @return {object}
* { status: 200, ref:"pagans",msg:"aliasexist",data: { alias, publicKey } }
*
*
*/
if (!conf.smtp) {
if (!conf.smtp || !conf.emailcontact) {
return {
status: 412,
ref: "Notifications",
msg: "missingconf",
tribe: tribeId,
data: { tribe: tribe },
};
}
if (!data.from) {
data.from = conf.emailcontact;
}
let missingk = [];
["from", "to", "subject", "html", "text"].forEach((k) => {
if (!data[k]) {
@@ -138,35 +159,59 @@ Notifications.sendmail = async (data, tribe) => {
});
if (missingk.lenght > 0) {
return {
status: 428,
status: 428,
ref: "Notifications",
msg: "missingdata",
missingk: missingk,
data: { missingk: missingk },
};
}
const conftribfile=`${process.env.dirtown}/tribes/itm/${tribe}.json`;
const confsmtp =(fs.existsSync(conftribfile))? fs.readJSONSync(conftribfile).smtp : conf.smtp;
let confsmtp = conf.smtp;
const conftribfile = `${process.env.dirtown}/tribes/itm/${tribe}.json`;
if (fs.existsSync(conftribfile)) {
const conftrib = fs.readJSONSync(conftribfile);
confsmtp = conftrib.smtp;
data.from = conftrib.emailcontact;
}
//const client = smtp.connect(confsmtp);
const transporter = await nodemailer.createTransport(confsmtp);
//@todo add attachments management
let missingfile=[]
if (missingfile.lenght > 0)
if (data.filelist) {
let missingfile = [];
data.filelist.forEach((fo) => {
if (fs.existsSync(fo.pathfile)){
}else{ missingfile.push(fo.pathfile);}
});
if (missingfile.lenght > 0)
return {
status: 428,
ref: "Notifications",
msg: "missingfile",
data: { missingfile: missingfile },
};
}
console.log("data:", data);
const res = await transporter.sendMail(data);
if (
res.accepted &&
data.to.split(",").reduce((acc, m) => acc && res.accepted.includes(m), true)
) {
data.accepted = res.accepted;
data.rejected = res.rejected;
return {
status: 428,
status: 200,
ref: "Notifications",
msg: "missingfile",
missingfile: missingfile,
msg: "successfullsentemail",
data,
};
try {
// Send the email
//const res = await client.sendMail(data)
const res = await transporter.sendMail(data)
//console.log('res envoie',res)
return { status: 200, ref: "Notifications", msg: "successfullsent", data };
} catch (err) {
//console.log('err envoie',err)
return { status: 502, ref: "Notifications", msg: "errsendmail", err: err };
} else if (res.accepted && res.rejected) {
data.accepted = res.accepted;
data.rejected = res.rejected;
return { status: 410, ref: "Notifications", msg: "errsendmail", data };
} else {
data.errmailer = res.err;
return { status: 417, ref: "Notifications", msg: "errsendmail", data };
}
};

View File

@@ -5,6 +5,7 @@ const dayjs = require("dayjs");
const axios = require("axios");
const conf = require(`${process.env.dirtown}/conf.json`);
const Checkjson = require(`./Checkjson.js`);
const { promiseHooks } = require("v8");
/**
* This manage Objects for indexing, searching, checking and act to CRUD
@@ -153,72 +154,100 @@ Odmdb.updateObject = (objectPathname, meta) => {};
* @return {status:200,data:{conf:"schemaconf",schema:"schemacontent"} }
*/
Odmdb.Schema = (objectPathname, validschema) => {
const getpath = (schemaPath) => {
if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
if (schemaPath.substring(0, 4) == "http") {
// lance requete http pour recuperer le schema avec un await axios
} else {
if (schemaPath.substring(0, 9) == "adminapi/") {
schemaPath = `${conf.dirapi}/${schemaPath}`;
} else {
schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
}
if (!fs.existsSync(schemaPath)) {
return {};
} else {
return fs.readJsonSync(schemaPath);
}
}
};
const confschema = fs.readJsonSync(`${objectPathname}/conf.json`);
let schemaPath = confschema.schema;
const res = {
status: 200,
ref: "Odmdb",
msg: "getschema",
data: { conf: confschema },
};
if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
if (schemaPath.substring(0, 4) == "http") {
// lance requete http pour recuperer le schema avec un await axios
} else {
if (schemaPath.substring(0, 9) == "adminapi/") {
schemaPath = `${conf.dirapi}/${schemaPath}`;
} else {
schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
}
if (!fs.existsSync(schemaPath)) {
return {
status: 404,
ref: "Odmdb",
msg: "schemanotfound",
data: { schemaPath, schema: {} },
};
}
res.data.schema = fs.readJsonSync(schemaPath);
if (!res.data.schema.apxid) {
return {
status: 406,
ref: "Odmdb",
msg: "missingprimarykey",
data: {},
};
}
if (res.data.schema.apxidx) {
//add apxidx to apxuniquekey in case not
if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
res.data.schema.apxuniquekey.push(res.data.schema.apxid);
res.data.schema = getpath(confschema.schema);
if (Object.keys(res.data.schema).length == 0) {
return {
status: 404,
ref: "Odmdb",
msg: "schemanotfound",
data: { schemaPath, schema: {} },
};
}
//looking for type:object with $ref to load and replace by ref content (ref must be adminapi/ or tribeid/)
//@todo only 1 level $ref if multi level need to rewrite with recursive call
Object.keys(res.data.schema.properties).forEach((p) => {
if (
res.data.schema.properties[p].type == "object" &&
res.data.schema.properties[p]["$ref"]
) {
const subschema = getpath(res.data.schema.properties[p]["$ref"]);
if (Object.keys(res.data.schema).length == 0) {
res.status = 404;
res.msg = "missingref";
res.data.missingref = res.data.schema.properties[p]["$ref"];
return res;
} else {
subschema.description += ` from external schema: ${res.data.schema.properties[p]["$ref"]}`;
res.data.schema.properties[p] = subschema;
}
res.data.schema.apxidx.forEach((idx) => {
if (
idx.objkey &&
!res.data.schema.apxuniquekey.includes(idx.keyval) &&
!idx.objkey.includes(res.data.schema.apxid)
) {
return {
status: 406,
ref: "Odmdb",
msg: "unconsistencyapxidx",
data: {
name: idx.name,
keyval: idx.keyval,
objkey: idx.objkey,
apxid: res.data.schema.apxid,
},
};
}
});
}
if (validschema || 1 == 1) {
// return {status:200, ref, msg} or {status!:200,multimsg:[{ref,msg;data}]}
const check = Checkjson.schema.validation(res.data.schema);
if (check.status != 200) {
res.multimsg = check.multimsg;
res.status = check.status;
});
if (!res.data.schema.apxid) {
return {
status: 406,
ref: "Odmdb",
msg: "missingprimarykey",
data: {},
};
}
if (res.data.schema.apxidx) {
//add apxidx to apxuniquekey in case not
if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
res.data.schema.apxuniquekey.push(res.data.schema.apxid);
}
res.data.schema.apxidx.forEach((idx) => {
if (
idx.objkey &&
!res.data.schema.apxuniquekey.includes(idx.keyval) &&
!idx.objkey.includes(res.data.schema.apxid)
) {
return {
status: 406,
ref: "Odmdb",
msg: "unconsistencyapxidx",
data: {
name: idx.name,
keyval: idx.keyval,
objkey: idx.objkey,
apxid: res.data.schema.apxid,
},
};
}
});
}
if (validschema || 1 == 1) {
// return {status:200, ref, msg} or {status!:200,multimsg:[{ref,msg;data}]}
const check = Checkjson.schema.validation(res.data.schema);
if (check.status != 200) {
res.multimsg = check.multimsg;
res.status = check.status;
}
}
return res;
@@ -243,6 +272,32 @@ Odmdb.search = (objectPath, objectName, search) => {
if (schema.status != 200) return schema;
};
Odmdb.r =(objectPathname, apxid, role)=>{
const itmf=`${objectPathname}/itm/${apxid}.json`
if (!fs.existsSync(itmf)){
return {status:404,ref: "Odmdb",msg: "persondoesnotexist",data: {person:apxid}}
}
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
const itm=fs.readJsonSync(itmf);
if (itm.owner && itm.owner == role.xalias) {
role.xprofils.push("owner");
}
const accessright = Odmdb.accessright(
getschema.data.schema.apxaccessrights,
role
);
if (!accessright.R) {
return {status:403, ref:"Odmdb",msg:"forbidden",data: {person:apxid}}
}
const data={}
accessright.R.forEach(p=>{
data[p]=itm[p]
})
return {status:200, ref:"Odmdb",msg:"found",data}
}
/**
* To get an array of item (itm) per primarykey with a list of field
* Object are store in objectPath/objectName/conf.json contain
@@ -252,8 +307,12 @@ Odmdb.search = (objectPath, objectName, search) => {
* @role {xalias,xprofiles} allow to get accessright come from header
* @propertiesfilter (optionnal) key to return for each object (if undefined then return all)
* @Return {status:200; data:{uuid:{data filter by @propertiesfilter},uuid:"notfound"}}
*/
Odmdb.reads = (objectPathname, apxidlist, role, propertiesfilter) => {
ASUP doit être gerer au niveau des view des index
*/
Odmdb.ASUPreads = (objectPathname, apxidlist, role, propertiesfilter) => {
const res = { status: 200, data: {} };
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
@@ -313,6 +372,8 @@ Odmdb.reads = (objectPathname, apxidlist, role, propertiesfilter) => {
* example: {"C":[],"R":[properties list],"U":[properties ist],"D":[]}
*/
Odmdb.accessright = (apxaccessrights, role) => {
//console.log("apxaccessrights",apxaccessrights)
//console.log("role",role)
const accessright = {};
role.xprofils.forEach((p) => {
if (apxaccessrights[p]) {
@@ -324,6 +385,7 @@ Odmdb.accessright = (apxaccessrights, role) => {
...new Set([...accessright[act], ...apxaccessrights[p][act]]),
];
}
//console.log(act,accessright[act])
});
}
});
@@ -340,7 +402,7 @@ Odmdb.accessright = (apxaccessrights, role) => {
* */
Odmdb.cud = (objectPathname, crud, itm, role) => {
Odmdb.cud = (objectPathname, crud, itm, role,runindex=true) => {
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
@@ -352,14 +414,19 @@ Odmdb.cud = (objectPathname, crud, itm, role) => {
data: { missingkey: getschema.data.schema.apxid },
};
}
const pathindex=`${objectPathname}/idx/lst_${getschema.data.schema.apxid}.json`;
if (!fs.existsSync(pathindex)){
fs.outputJSONSync(pathindex,[])
fs.ensureDirSync(`${objectPathname}/itm/`)
fs.ensureDirSync(`${objectPathname}/idx/`)
const existid=fs.existsSync(`${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`);
/*const pathindex = `${objectPathname}/idx/lst_${getschema.data.schema.apxid}.json`;
if (!fs.existsSync(pathindex)) {
fs.outputJSONSync(pathindex, []);
fs.ensureDirSync(`${objectPathname}/itm/`);
}
const existid = fs
.readJSONSync(pathindex)
.includes(itm[getschema.data.schema.apxid]);
*/
if (existid && crud == "C") {
return {
status: 406,
@@ -391,9 +458,11 @@ Odmdb.cud = (objectPathname, crud, itm, role) => {
: {};
if (existid && itmold.owner && itmold.owner == role.xalias) {
role.xprofils.push("owner");
} else {
}
if (!existid && crud=="C" && !itm.owner) {
// set owner cause this is a Create
itm.owner = role.xalias;
role.xprofils.push("owner");
}
//get accessright {C:[],R:[],U:[],D:[]} if exist means authorize, if array contain properties (for R and U) right is only allowed on properties
const accessright = Odmdb.accessright(
@@ -415,7 +484,7 @@ Odmdb.cud = (objectPathname, crud, itm, role) => {
}
//delete or save
if (crud == "D") {
itmold["dt_delete"] = dayjs();
itmold["dt_delete"] = dayjs().toISOString();
fs.outputJSONSync(
`${objectPathname}/delitm/${itmold[getschema.data.schema.apxid]}.json`,
itmold
@@ -431,9 +500,9 @@ Odmdb.cud = (objectPathname, crud, itm, role) => {
accessright.U.forEach((p) => {
itmtostore[p] = itm[p];
});
itmtostore.dt_update = dayjs();
itmtostore.dt_update = dayjs().toISOString();
}
if (crud == "C") itmtostore.dt_create = dayjs();
if (crud == "C") itmtostore.dt_create = dayjs().toISOString();
// check consistency of datatostore
const chkdata = Checkjson.schema.data(
getschema.data.schema,
@@ -443,31 +512,109 @@ Odmdb.cud = (objectPathname, crud, itm, role) => {
if (chkdata.status != 200) return chkdata;
if (!getschema.data.schema.apxuniquekey)
getschema.data.schema.apxuniquekey = [];
console.log(`${objectPathname}/itm/${chkdata.data.apxid}.json`);
console.log(chkdata.data.itm);
fs.outputJSONSync(
`${objectPathname}/itm/${chkdata.data.apxid}.json`,
chkdata.data.itm
);
}
console.log("getschema", getschema);
//update idx
Odmdb.idxfromitm(
objectPathname,
crud,
itm,
itmold,
[],
getschema.data.schema
);
getschema.data.conf.lastupdatedata = dayjs();
//console.log("getschema", getschema);
//rebuild index if requested
console.log("runidx",runindex)
console.log(objectPathname)
if (runindex) Odmdb.runidx(objectPathname,getschema.data.schema);
getschema.data.conf.lastupdatedata = dayjs().toISOString();
fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
return {
status: 200,
ref: "Odmdb",
msg: "cudsuccessfull",
data: { itm: chkdata.data.itm },
data: {},
};
};
/**
* apxidx:[list of index file
* { name: string, // filename in /idx/name.json
* type: array (list of unique value found in keyval), example: [val1, val2, val3,....] eventualy filter
* view (only unique key and object with objkey eventually filter by filter ) example: {idalias:{itm}}
* distribution (=> {val of keyval:[apxid itm value]} with )
* keyval:string, // a propertie of type string or array
* objkeys:[] and array of properties if empty [] means all properties, if 1 element => an array of this properties
* filter : an eval function base on itm data (itm.key) that return true (return data item) or false (ignore item)
*
*/
Odmdb.runidx = (objectPathname, schema) => {
console.log(`idx for ${objectPathname}`);
if (!schema || !schema.apxid) {
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
schema = getschema.data.schema;
}
const ventil = {};
schema.apxidx.forEach((id) => {
ventil[id.name] = id;
ventil[id.name].isunique = schema.apxuniquekey.includes(id.keyval);
ventil[id.name].nbobjkey = id.objkey ? id.objkey.length : 0;
ventil[id.name].keyvaltype = schema.properties[id.keyval].type;
ventil[id.name].filter = id.filter ? id.filter.replace(/;/g, "") : ""; // check integrity of string
ventil[id.name].data = ventil[id.name].type == "array" ? [] : {};
});
glob.sync(`${objectPathname}/itm/*.json`).forEach((i) => {
const itm = fs.readJSONSync(i);
Object.keys(ventil).forEach((n) => {
let keep = true;
if (ventil[n].filter != "") {
try {
keep = eval(ventil[n].filter);
} catch (err) {
keep = false;
}
}
if (keep && ventil[n].type == "array" && itm[ventil[n].keyval]) {
if (ventil[n].keyvaltype == "array") {
itm[ventil[n].keyval].forEach((v) => {
if (!ventil[n].data.includes(v)) ventil[n].data.push(v);
});
} else {
if (!ventil[n].data.includes(itm[ventil[n].keyval]))
ventil[n].data.push(itm[ventil[n].keyval]);
}
}
if (
keep &&
ventil[n].type == "view" &&
ventil[n].isunique &&
itm[ventil[n].keyval]
) {
if (ventil[n].nbobjkey == 0)
ventil[n].data[itm[ventil[n].keyval]] = itm;
if (ventil[n].nbobjkey == 1)
ventil[n].data[itm[ventil[n].keyval]] = itm[ventil[n].objkey[0]];
if (ventil[n].nbobjkey > 1) {
const objdata={}
Object.keys(itm).forEach(k=>{
if (ventil[n].objkey.includes(k)) objdata[k]=itm[k];
})
ventil[n].data[itm[ventil[n].keyval]]=objdata
}
}
if (keep && ventil[n].type == "distribution" && itm[ventil[n].keyval] ) {
const listval = (ventil[n].keyvaltype=="string")? [itm[ventil[n].keyval]] : itm[ventil[n].keyval];
// itm[ventil[n].keyval] is an array
listval.forEach(val=>{
if (!ventil[n].data[val]) ventil[n].data[val] = [];
ventil[n].data[val].push(itm[schema.apxid])
});
}
});
});
Object.keys(ventil).forEach((n)=>{
//console.log(`${objectPathname}/idx/${ventil[n].name}.json`)
fs.outputJSON(`${objectPathname}/idx/${ventil[n].name}.json`,ventil[n].data);
})
};
/**
* create/update idx from itm(s)
*
@@ -490,7 +637,7 @@ Odmdb.cud = (objectPathname, crud, itm, role) => {
* Odmdb.idxfromitm('.../tribes/ndda/pagans',"I",{},{},[{ name:"publickey_alias",keyval:"publickey",objkey:["alias"]}], {pagans schema})
*
*/
Odmdb.idxfromitm = (objectPathname, crud, itm, itmold, idxs = [], schema) => {
Odmdb.ASUPidxfromitm = (objectPathname, crud, itm, itmold, idxs = [], schema) => {
console.log(`idxfromitem for ${objectPathname} action:${crud}`);
if (!schema || !schema.apxid) {
const getschema = Odmdb.Schema(objectPathname, true);
@@ -601,8 +748,8 @@ Odmdb.idxfromitm = (objectPathname, crud, itm, itmold, idxs = [], schema) => {
if (crud != "I") {
//update lastupdatedata to inform something change
const confschema = fs.readJSONSync(`${objectPathname}/conf.json`);
confschema.lastupdatedata = dayjs();
fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
confschema.lastupdatedata = dayjs().toISOString();
fs.outputJSONSync(`${objectPathname}/conf.json`, confschema);
}
return { status: 200, ref: "Odmdb", msg: "successreindex", data: {} };
};
@@ -652,7 +799,7 @@ Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
//check if it miss id in fresher update means conf.lastupdate will be now to indicate
Object.keys(localidx).forEach((id) => {
if (!data[id]) {
conflastupdate = dayjs();
conflastupdate = dayjs().toISOString();
}
});
// update the object files

View File

@@ -3,13 +3,11 @@ const path = require("path");
const dayjs = require("dayjs");
const fs = require("fs-extra");
const axios = require("axios");
const Mustache = require('mustache');
const openpgp = require("openpgp");
const Notifications = require("../models/Notifications.js");
const Odmdb = require("../models/Odmdb.js");
/*if (fs.existsSync("../../nationchains/tribes/conf.json")) {
conf = require("../../nationchains/tribes/conf.json");
}*/
const conf = require(`${process.env.dirtown}/conf.json`);
/**
@@ -37,6 +35,7 @@ Pagans.logout = (alias, tribe, xdays, xhash) => {
//max filename in ext4: 255 characters
tmpfs += `_${xhash.substring(150, 150 + tmpfs.length - 249)}.json`;
fs.remove(tmpfs);
console.log('logout token',tmpfs)
return { status: 200, ref: "Pagans", msg: "logout" };
};
@@ -47,7 +46,7 @@ Pagans.logout = (alias, tribe, xdays, xhash) => {
*
**/
Pagans.getalias = (alias) => {
//bypass Odmdb cause all is public
//bypass Odmdb cause all is public save ressources
if (fs.existsSync(`${conf.dirapi}/nationchains/pagans/itm/${alias}.json`)) {
return {
status: 200,
@@ -67,106 +66,7 @@ Pagans.getalias = (alias) => {
}
};
/**
* @param {string} alias that exist
* @param {string} tribeId that exist with a person alias
* @return {object} { status: 200, ref:"pagans",msg:"personexist",data: { person } }
* { status: 404, ref:"pagans",msg:"persondoesnotexist",data: { person } }
*
**/
Pagans.getperson = (tribeid, alias, role) => {
const objlst = Odmdb.reads(
`${conf.dirtown}/tribes/${tribeid}/persons`,
[alias],
role
);
if (objlst.data[alias] == "notfound") {
return {
status: 404,
ref: "Pagans",
msg: "persondoesnotexist",
data: { alias, tribeid },
};
} else {
return {
status: 200,
ref: "Pagans",
msg: "personexist",
data: objlst.data[alias],
};
}
};
Pagans.create = (objpagan, role) => {
/**
* @param {object} objpagan {alias,publickey} a unique alias/publickey that identify an identity
* @param {array} role {xalias,xprofils} requester and list of profil
* @return {object} { status: 200, data: { alias, publicKey } }
* xhash was checked by isauthenticated
* @todo use Odmdb to add a pagan
*/
return Odmdb.cud(`${conf.dirapi}/nationchains/pagans`, "C", objpagan, role);
/*
let apxpagans = {};
if (fs.existsSync(`${conf.dirapi}/nationchains/pagans/idx/alias_all.json`)) {
apxpagans = fs.readJsonSync(
`${conf.dirapi}/nationchains/pagans/idx/alias_all.json`
);
}
if (apxpagans[objpagan.alias]) {
return { status: 409, ref: "Pagans", msg: "aliasexist", data: { alias } };
}
apxpagans[objpagan.alias] = { alias, publicKey };
fs.outputJsonSync(
`${conf.dirapi}/nationchains/pagans/idx/alias_all.json`,
apxpagans
);
fs.outputJsonSync(`${conf.dirapi}/nationchains/pagans/itm/${alias}.json`, {
alias,
publicKey,
});
return {
status: 200,
ref: "Pagans",
msg: "identitycreate",
data: { alias, publicKey },
};
*/
};
/**
* @Param {string} alias pagan unique id
* @Param {string} tribeid tribe id in this town
* @Param {object} persondata that respect /nationchains/schema/person.json + nationchains/tribe/tribeid/schema/personextented.json
* @return create or update a person /tribe/tribeid/person/alias.json
* todo later use Odmdb ans schema person to manage this
*/
Pagans.personupdate = (tribeid, alias, personupdate, role) => {
const personinit = {
alias: alias,
dt_create: dayjs(),
profils: ["person"],
};
const personfile = `${process.env.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`;
const persondata = fs.existsSync(personfile)
? fs.readJSONSync(personfile)
: personinit;
persondata.dt_update = dayjs();
Object.keys(personupdate).forEach((d) => {
persondata[d] = personupdate[d];
});
//const checkjson= Checkjson.schema.data = (fs.readJsonSync(`${conf.dirapi}/nationchains/schema/person.json`, person, false)
// if checkjson.status==200 create /update with odmdb to update index data
// see odmdb that did all and return standard message
fs.outputJSONSync(personfile, persondata, { space: 2 });
return {
status: 200,
ref: "Pagans",
msg: "successfullupdate",
data: { alias: alias, tribeid: tribeid },
};
};
/**
* Send email with alias's keys to email or person alias person.recovery.email
*
@@ -183,38 +83,38 @@ Pagans.personupdate = (tribeid, alias, personupdate, role) => {
Pagans.sendmailkey = (
alias,
privatekey,
tribeid,
tribe,
passphrase,
publickey,
email
email,
lg
) => {
const person = { alias, privatekey, tribeid };
const person = { alias, privatekey, tribe };
console.log(
alias,
"-",
privatekey,
privatekey.substring(0,10),
"-",
tribeid,
tribe,
"-",
passphrase,
"-",
publickey,
publickey.substring(0,10),
"-",
email
);
if (!publickey || !email || !passphrase || !privatekey) {
const personfile = `${process.env.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`;
const persondata = fs.existsSync(personfile)
? fs.readJsonSync(personfile)
: {};
if (persondata.length == 0) {
if (!publickey || !email || !privatekey) {
const personfile = `${process.env.dirtown}/tribes/${tribe}/objects/persons/itm/${alias}.json`;
if (!fs.existsSync(personfile)) {
return {
status: 404,
ref: "Pagans",
msg: "persondoesnotexist",
data: { alias, tribeid },
data: { alias, tribe },
};
}
const persondata= fs.readJsonSync(personfile);
person.email = persondata.recoveryauth.email;
person.publickey = persondata.recoveryauth.publickey;
person.privatekey = persondata.recoveryauth.privatekey;
@@ -224,48 +124,30 @@ Pagans.sendmailkey = (
person.passphrase = passphrase;
person.publickey = publickey;
}
person.avecpassphrase=(person.passphrase!="")
console.log("person:", person);
const tplemail = require(`${conf.dirapi}/adminapi/template/createidentity_${lg}.js`)
//feedback.withemail = true;
//feedback.email = email;
//feedback.privatekey = privatekey;
//feedback.passphrase = passphrase;
const mailidentity = {
subjecttpl: "Information pour l'alias: {{alias}}",
htmltpl:
"<h1>Votre identité {{alias}} via {{tribeid}}</h1><p>Passphrase:</p></p><p>{{{passphrase}}</p><p>Cle public:</p><p>{{{publickey}}</p><p>Cle privée</p><p>{{{privatekey}}</p>",
texttpl:
"Votre identité {{alias}}\nPassphrase:\n{{{passphrase}}\nCle public:\n{{{publickey}}\nCle privée\n{{{privatekey}}",
filelist: [],
};
person.message
const maildata = {
To: person.email,
subject: Mustache.render(mailidentity.subject, person),
htmlpart: Mustache.render(mailidentity.htmltpl, person),
textpart: Mustache.render(mailidentity.texttpl, person),
filelist: [],
to: person.email,
subject: Mustache.render(tplemail.subject, person),
html: Mustache.render(tplemail.html, person),
text: Mustache.render(tplemail.text, person),
attachments: [
{
filename:`${person.alias}_publickey.txt`,
content: person.publickey,
contentType:"text/plain"
},
{
filename:`${person.alias}_privatekey.txt`,
content: person.privatekey,
contentType:"text/plain"
}
]
};
fs.outputFileSync(
`${conf.dirtown}/tmp/${person.alias}_privatekey.txt`,
person.privatekey,
"utf8"
);
maildata.filelist.push({
filename: "${person.alias}_privatekey.txt",
pathfile: `${conf.dirtown}/tmp/${person.alias}_privatekey.txt`,
});
fs.outputFileSync(
`${conf.dirtown}/tmp/${person.alias}_publickey.txt`,
person.publickey,
"utf8"
);
maildata.filelist.push({
filename: "${person.alias}_publickey.txt",
pathfile: `${conf.dirtown}/tmp/${person.alias}_publickey.txt`,
});
//fs.readJSONSync('${conf.dirapi}/api/')
return Notifications.sendmail(maildata, tribeid);
return Notifications.sendmail(maildata, tribe);
};
Pagans.authenticatedetachedSignature = async (
@@ -306,15 +188,6 @@ Pagans.authenticatedetachedSignature = async (
}
};
/**
 * Delete a pagan alias (stub — not implemented yet).
 * TODO: retrieve all tokens and distribute them to the town.
 * @param {string} alias - pagan unique id to delete
 */
Pagans.deletealias = (alias) => {
  // check if alias is used in the blockchain
  // if not then delete itm pagan alias => this means that alias is now available for someone else
};
// Stub — delete a person record from a tribe (not implemented yet).
// NOTE(review): parameter is spelled tribeId here but tribeid elsewhere in this file — confirm intended casing.
Pagans.deleteperson = (alias, tribeId) => {};
Pagans.keyrecovery = (tribeid, email) => {
glob
.GlobSync(`${conf.dirtown}/tribes/${tribeId}/Person/*.json`)

View File

@@ -2,5 +2,6 @@
"validcheck":"Your data are valid",
"typedoesnnotexistinschema":"This type in your property is not managed by Checkjson.js",
"dataerrpropertie":"Check your data that not fit your schema rules propertie",
"dataerrpropertiesrequired":"This propertie is required and not present in your data"
"dataerrpropertiesrequired":"This property is required and not present in your data",
"externalrefnotload":"Your schema contains a reference to an external sub-schema; load it before validation"
}

View File

@@ -1,14 +1,19 @@
{
"alreadyexist": "Un object {{objectname}} avec la clé {{key}} existe déjà avec {{val}}",
"doesnotexist": "L'object {{objectname}} avec la clé {{key}} ,'existe pas avec {{val}}",
"doesnotexist": "L'object {{objectname}} n'existe pas avec {{key}}:{{val}}",
"getschema": "Schema {{{conf.name}}}",
"schemanotfound": "Schema introuvable dans {{{schemaPath}}}",
"pathnamedoesnotexist": "Le repertoire n'existe pas {{{indexpath}}}",
"objectfiledoesnotexist": "Le fichier n'existe pas {{{objectpath}}}",
"cudsuccessfull": "Mise à jour effectuée avec succès",
"successfulcreatewithoutemail":"Créé sans envoi d'email",
"successfulcreatewitemail":"Créé, vous allez recevoir un email",
"missingprimarykey": "Il manque une clé primaire apxid pour stocker et identifier les objects",
"unconsistencyapxidx": "L'index {{name}} doit contenir en objkey au moins {{apxid}} car keyval n'est pas unique",
"profilnotallow": "Vous n'avez pas le profil de {{profils}}, cette action n'est pas autorisée",
"successreindex": "Objet reindexé à partir des items, vos index sont à jour",
"indexexist":"L'indexe existe"
"indexexist":"L'indexe existe",
"typenotavailable":"Le type: {{type}} pour la propertie : {{propertie}} de l'object :{{objectPathname}} n'est pas pris en compte pour l'indexation",
"objectslist":"Liste des objects d'apxtrib et de {{tribe}}",
"errordelete":"Désolé, impossible de supprimer ce compte"
}

View File

@@ -1,13 +1,12 @@
{
"aliasexist": "Cet alias {{data.alias}} existe",
"emailerr": "Verifier votre email",
"aliasexist": "Cet alias {{alias}} existe",
"emailerr": "Verifier votre email {{email}}",
"aliasorprivkeytooshort": "Vérifiez votre alias et votre clé privée",
"aliasdoesnotexist": "Cet alias {{data.alias}} n'existe pas",
"personexist": "Cette personne {{data.alias}} existe pour {{data.tribeid}}",
"persondoesnotexist": "Cette personne {{data.alias}} n'existe pas pour {{data.tribeid}}",
"successfullcreate": "La création de cette identité {{data.alias}} a été un succès. {{#data.withemail}} Un email a été envoyé à {{data.email}}, si vous ne le recevez pas, veuillez télécharger vos clés avant de quitter cette page.{{/data.withemail}}",
"successfulcreatewithoutemail": "La creation de data.alias}} a été un succès. Aucun email ,'a été envoyé, verifier bien que vos clés sont bien sauvegardé de votre coté",
"successfulluppdate": "Votre alias en tant que Personne est maintenant mis à jour dans {{data.tribeid}}",
"aliasdoesnotexist": "Cet alias {{alias}} n'existe pas",
"personexist": "Cette personne {{alias}} existe pour {{tribeid}}",
"persondoesnotexist": "Cette personne {{alias}} n'existe pas pour {{tribeid}}",
"successfulcreate":"Votre identité {{alias}} a été créée à partir de vos clés.{{#emailsent}} Un email a été envoyé à {{email}}, si vous ne le recevez pas, veuillez télécharger vos clés avant de quitter cette page.{{/emailsent}} {{#emailerror}}Un problème lors de l'envoi sur {{email}} s'est produit. Veuillez télécharger vos clés avant de quitter cette page.{{/emailerror}}<br>{{#createperson}}Félicitations, votre compte est ouvert chez {{tribe}}. {{/createperson}} {{#errorperson}}Attention, votre identité n'a pas été associée à {{tribe}}. Vous pourrez rejoindre {{tribe}} dans l'écran suivant. {{/errorperson}}",
"successfulluppdate": "Votre alias en tant que Personne est maintenant mis à jour dans {{tribeid}}",
"errcreate": "Désolé, un problème inconnu empêche la création",
"logout": "Votre token a été supprimé du server"
}