major update
@@ -11,7 +11,7 @@ Checkjson.schema = {};
Checkjson.schema.properties = {};
Checkjson.schema.properties.type = {};
Checkjson.schema.properties.type.string = (str) => typeof str === "string";
Checkjson.schema.properties.type.array = (val) => Array.isArray(val);
Checkjson.schema.properties.type.number = (n) => typeof n === "number";
Checkjson.schema.properties.type.boolean = (n) => typeof n === "boolean";
Checkjson.schema.properties.type.integer = (n) =>
@@ -47,19 +47,44 @@ Checkjson.schema.properties.range = (
};
Checkjson.schema.properties.pattern = (str, pattern) => {
  try {
    pattern = new RegExp(pattern);
  } catch (e) {
    console.log("err pattern in checkjson", pattern);
    return false;
  }
  return pattern.test(str);
};

Checkjson.schema.properties.enum = (str, enumvalues) => {
  if (Array.isArray(enumvalues)) {
    return typeof str === "string" && enumvalues.includes(str);
  } else if (typeof enumvalues === "string" && enumvalues.includes(".")) {
    //enumvalues is a reference tribeId.objectname.key
    const [tribeId, obj, keyid] = enumvalues.split(".");
    return fs.existsSync(
      `${conf.dirtown}/tribes/${tribeId}/schema/${obj}/itm/${keyid}.json`
    );
  } else {
    return true;
  }
};

// to check a value for a pattern
// Checkjson.schema.properties.pattern(value, properties[p].pattern)
/**
 *
 * @param {string} str to test
 * @param {string} format keyword existing in Checkjson.schema.properties.format
 * @return null if format does not exist, true or false
 */
Checkjson.testformat = (str, format) => {
  if (!Checkjson.schema.properties.format[format]) {
    return null;
  }
  return Checkjson.schema.properties.pattern(
    str,
    Checkjson.schema.properties.format[format]
  );
};
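
// A minimal usage sketch (illustrative, not part of the commit); the format
// keywords are defined in Checkjson.schema.properties.format just below:
// Checkjson.testformat("2023-01-31T10:00:00.000", "date-time"); // true
// Checkjson.testformat("abc", "stringalphaonly"); // true
// Checkjson.testformat("abc", "nosuchformat"); // null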

// see format https://json-schema.org/understanding-json-schema/reference/string.html#format
// to check just a value with a format use Checkjson.testformat(value, format)
Checkjson.schema.properties.format = {
  "date-time": /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/,
  stringalphaonly: /^[A-Za-z0-9]{3,}$/,
  time: /[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/,
  date: /\d{4}-[01]\d-[0-3]\d/,
  duration: / /,
@@ -79,11 +104,16 @@ Checkjson.schema.properties.format = {
  password:
    /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/,
  postalcodefr: /(^\d{5}$)|(^\d{5}-\d{4}$)/,
  pgppublickey:
    /^-----BEGIN PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)?$/gm,
  pgpprivatekey:
    /^-----BEGIN PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)?$/gm,
};

Checkjson.schema.properties.default;
Checkjson.schema.validation = (schema) => {
  /*validate a schema structure*/
  const multimsg = [];
  const res = {};
  if (schema.properties) {
    Object.keys(schema.properties).forEach((p) => {
      const properties = schema.properties;
@@ -92,10 +122,10 @@ Checkjson.schema.validation = (schema) => {
        typeof properties[p].type === "string" &&
        !Checkjson.schema.properties.type[properties[p].type]
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "schemaerrtypedoesnotexist",
          data: { propertie: p, type: properties[p].type },
        });
      }
      if (
@@ -105,10 +135,10 @@ Checkjson.schema.validation = (schema) => {
      ) {
        properties[p].type.forEach((tp) => {
          if (!Checkjson.schema.properties.type[tp])
            multimsg.push({
              ref: "Checkjson",
              msg: "schemaerrtypedoesnotexist",
              data: { propertie: p, type: properties[p].type },
            });
        });
      }
@@ -116,26 +146,41 @@ Checkjson.schema.validation = (schema) => {
        properties[p].format &&
        !Checkjson.schema.properties.format[properties[p].format]
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "schemaerrformatdoesnotexist",
          data: { propertie: p, format: properties[p].format },
        });
      }
      if (properties[p].enum && !Array.isArray(properties[p].enum)) {
        multimsg.push({
          ref: "Checkjson",
          msg: "schemaerrenumnotarray",
          data: { propertie: p, enum: properties[p].enum },
        });
      }
    });
  }
  // 406 means not acceptable
  if (multimsg.length > 0) {
    res.status = 406;
    res.multimsg = multimsg;
  } else {
    res.status = 200;
    res.ref = "Checkjson";
    res.msg = "validcheck";
  }
  return res;
};
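
// A minimal usage sketch of Checkjson.schema.validation (illustrative, not
// part of the commit):
// Checkjson.schema.validation({
//   properties: { alias: { type: "string", format: "stringalphaonly" } },
// });
// // => { status: 200, ref: "Checkjson", msg: "validcheck" }
// Checkjson.schema.validation({ properties: { alias: { type: "text" } } });
// // => { status: 406, multimsg: [{ ref: "Checkjson",
// //      msg: "schemaerrtypedoesnotexist",
// //      data: { propertie: "alias", type: "text" } }] }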

/**
 * Check data with a schema
 *
 * @param {object} schema a json schema
 * @param {*} data some data to check using schema
 * @param {*} withschemacheck boolean that forces a schema check (useful on schema modification)
 * @returns {status: 200, ref:"Checkjson", msg:"validcheck", data:{itm:object}}
 *          {status:417, multimsg:[{ref,msg,data}], data:{itm:object}}
 */
Checkjson.schema.data = (schema, data, withschemacheck) => {
  /* validate a data set with a schema in a context ctx */
  /*
@@ -148,7 +193,11 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
    const validschema = Checkjson.schema.validation(schema);
    if (validschema.status != 200) return validschema;
  }
  const multimsg = [];
  const res = {
    status: 200,
    data: { itm: data },
  };
  if (schema.properties) {
    const properties = schema.properties;
    Object.keys(properties).forEach((p) => {
@@ -164,40 +213,61 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
        if (Checkjson.schema.properties.type[typ](data[p])) valid = true;
      });
      if (!valid)
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertie",
          data: { key: p, value: data[p] },
        });

      if (
        properties[p].minLength &&
        !Checkjson.schema.properties.minLength(
          data[p],
          properties[p].minLength
        )
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertie",
          data: {
            key: p,
            value: data[p],
            minLength: properties[p].minLength,
          },
        });
      }
      if (
        properties[p].maxLength &&
        !Checkjson.schema.properties.maxLength(
          data[p],
          properties[p].maxLength
        )
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertie",
          data: {
            key: p,
            value: data[p],
            maxLength: properties[p].maxLength,
          },
        });
      }
      if (
        properties[p].multipleOf &&
        !Checkjson.schema.properties.multipleOf(
          data[p],
          properties[p].multipleOf
        )
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertie",
          data: {
            key: p,
            value: data[p],
            multipleOf: properties[p].multipleOf,
          },
        });
      }
      if (
@@ -216,10 +286,17 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
          properties[p].exclusiveMaximum
        )
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertie",
          data: {
            key: p,
            value: data[p],
            minimum: properties[p].minimum,
            maximum: properties[p].maximum,
            exclusiveMinimum: properties[p].exclusiveMinimum,
            exclusiveMaximum: properties[p].exclusiveMaximum,
          },
        });
      }
    }
@@ -227,10 +304,10 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
        properties[p].enum &&
        !Checkjson.schema.properties.enum(data[p], properties[p].enum)
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertie",
          data: { key: p, value: data[p], enumlst: properties[p].enum },
        });
      }
      if (properties[p].format) {
@@ -241,22 +318,32 @@ Checkjson.schema.data = (schema, data, withschemacheck) => {
        properties[p].pattern &&
        !Checkjson.schema.properties.pattern(data[p], properties[p].pattern)
      ) {
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertie",
          data: { key: p, value: data[p], pattern: properties[p].pattern },
        });
      }
      } else if (schema.required && schema.required.includes(p)) {
        multimsg.push({
          ref: "Checkjson",
          msg: "dataerrpropertierequired",
          data: { key: p, required: true },
        });
      }
    });
  } //end properties
  if (schema.apxid) {
    res.data.apxid = data[schema.apxid];
  }
  if (multimsg.length > 0) {
    res.status = 417;
    res.multimsg = multimsg;
  } else {
    res.status = 200;
    res.ref = "Checkjson";
    res.msg = "validcheck";
  }
  return res;
};
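
// A minimal usage sketch of Checkjson.schema.data (illustrative, not part of
// the commit); "alias" plays the apxid primary key role here:
// const schema = {
//   apxid: "alias",
//   required: ["alias"],
//   properties: { alias: { type: "string", minLength: 3 } },
// };
// Checkjson.schema.data(schema, { alias: "fred" });
// // => { status: 200, data: { itm: { alias: "fred" }, apxid: "fred" },
// //      ref: "Checkjson", msg: "validcheck" }
// Checkjson.schema.data(schema, { alias: "fd" });
// // => { status: 417, multimsg: [{ ref: "Checkjson", msg: "dataerrpropertie",
// //      data: { key: "alias", value: "fd", minLength: 3 } }], ... }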

if (typeof module !== "undefined") module.exports = Checkjson;

@@ -1,7 +1,10 @@
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");

const axios = require("axios");
//const smtp = require("smtp-client");
const nodemailer = require("nodemailer");
const conf = require(`${process.env.dirtown}/conf.json`);
/**
 * To manage any communication between Pagan
 * mayor druid emailing/sms/paper from tribe register smtp, simcard, mail api to Person(s) / Pagan(s)
@@ -11,10 +14,160 @@ const fs = require("fs-extra");

const Notifications = {};

Notifications.send = (data) => {
  const ret = {};
  console.log("TODO dev notification emailing");
  return ret;
};

Notifications.sendsms = async (data, tribeId) => {
  /**
   * Never use: needs a wallet in mailjet to test
   * To set up with mailjet see https://dev.mailjet.com/sms/guides/send-sms-api/#authentication
   *
   * @param {string} data.To a phone number in international format +3360101010101
   * @param {string} data.Text text to send
   *
   * needs a conf.sms with {url:"smsurl", Token:"", From:""}
   *
   */

  if (!conf.sms) {
    return {
      status: 412,
      ref: "Notifications",
      msg: "missingconf",
      tribe: tribeId,
    };
  }
  let missingk = [];
  ["To", "Text"].forEach((k) => {
    if (!data[k]) {
      missingk.push(k);
    }
  });
  if (missingk.length > 0) {
    return {
      status: 428,
      ref: "Notifications",
      msg: "missingdata",
      missingk: missingk,
    };
  }
  let confsms = conf.sms;
  if (
    fs.existsSync(`${process.env.dirtown}/tribes/itm/${tribeId}.json`)
  ) {
    const conftrib = fs.readJSONSync(
      `${process.env.dirtown}/tribes/itm/${tribeId}.json`
    );
    if (conftrib.sms) confsms = conftrib.sms;
  }
  data.From = confsms.From;
  // axios.post(url, body, config)
  const sendsms = await axios.post(confsms.url, JSON.stringify(data), {
    headers: {
      Authorization: `Bearer ${confsms.MJ_TOKEN}`,
      "Content-Type": "application/json",
    },
  });
  if (sendsms.status == 200) {
    return { status: 200, ref: "Notifications", msg: "successfullsentsms" };
  } else {
    return {
      status: sendsms.status,
      ref: "Notifications",
      msg: "errsendsms",
      err: sendsms.data,
    };
  }

  /* if everything goes well:
  {
    "From": "MJPilot",
    "To": "+33600000000",
    "Text": "Have a nice SMS flight with Mailjet !",
    "MessageId": "2034075536371630429",
    "SmsCount": 1,
    "CreationTS": 1521626400,
    "SentTS": 1521626402,
    "Cost": {
      "Value": 0.0012,
      "Currency": "EUR"
    },
    "Status": {
      "Code": 2,
      "Name": "sent",
      "Description": "Message sent"
    }
  }
  */
};
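
// A minimal usage sketch (illustrative, not part of the commit); "mytribe" is
// a placeholder tribe id and a conf.sms (or tribe sms conf) must exist:
// const ret = await Notifications.sendsms(
//   { To: "+33600000000", Text: "Have a nice SMS flight" },
//   "mytribe"
// );
// // => { status: 200, ref: "Notifications", msg: "successfullsentsms" }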

Notifications.sendmail = async (data, tribe) => {
  /**
   * @param {string} data.From an email authorized by smtp, used in priority from header xtribe
   * @param {string} data.To list of emails separated by ,
   * @param {string} data.subject
   * @param {string} data.html
   * @param {string} data.text
   * @param {string} data.Cc list of emails in copy
   * @param {string} data.Bcc list of emails in hidden copy
   * @param {string} data.filelist an array of objects {filename:"",pathfile:"",filetype:""}, pathfile to attach as file named "filename" of type filetype to this email
   * example of filetype : "text/plain", "text/csv", "image/gif", "application/json", "application/zip"
   *
   * @example data
   * {"From":"wall-ants.ndda.fr",
   *  "To":"wall-ants.ndda.fr",
   *  "subject":"Test",
   *  "html":"<h1>test welcome</h1>",
   *  "text":"test welcome",
   *  "attachments":[{filename:"text.txt",pathfile:"/media/phil/textA.txt","contenttype":"text/plain"}]
   * }
   * @return {object} { status: 200, ref:"Notifications", msg:"successfullsent", data }
   *
   */
  if (!conf.smtp) {
    return {
      status: 412,
      ref: "Notifications",
      msg: "missingconf",
      tribe: tribe,
    };
  }
  let missingk = [];
  ["from", "to", "subject", "html", "text"].forEach((k) => {
    if (!data[k]) {
      missingk.push(k);
    }
  });
  if (missingk.length > 0) {
    return {
      status: 428,
      ref: "Notifications",
      msg: "missingdata",
      missingk: missingk,
    };
  }
  const conftribfile = `${process.env.dirtown}/tribes/itm/${tribe}.json`;
  const confsmtp = fs.existsSync(conftribfile)
    ? fs.readJSONSync(conftribfile).smtp
    : conf.smtp;

  //const client = smtp.connect(confsmtp);
  const transporter = nodemailer.createTransport(confsmtp);
  //@todo add attachments management
  let missingfile = [];
  if (missingfile.length > 0)
    return {
      status: 428,
      ref: "Notifications",
      msg: "missingfile",
      missingfile: missingfile,
    };
  try {
    // Send the email
    //const res = await client.sendMail(data)
    const res = await transporter.sendMail(data);
    //console.log('res sending', res)
    return { status: 200, ref: "Notifications", msg: "successfullsent", data };
  } catch (err) {
    //console.log('err sending', err)
    return { status: 502, ref: "Notifications", msg: "errsendmail", err: err };
  }
};
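
// A minimal usage sketch (illustrative, not part of the commit); note the
// lowercase keys checked above and "mytribe" as a placeholder tribe id:
// const ret = await Notifications.sendmail(
//   {
//     from: "wall-ants.ndda.fr",
//     to: "someone@example.com",
//     subject: "Test",
//     html: "<h1>test welcome</h1>",
//     text: "test welcome",
//   },
//   "mytribe"
// );
// // => { status: 200, ref: "Notifications", msg: "successfullsent", data }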

module.exports = Notifications;
@@ -6,40 +6,85 @@ const axios = require("axios");
const conf = require(`${process.env.dirtown}/conf.json`);
const Checkjson = require(`./Checkjson.js`);

/**
 * This manages Objects for indexing, searching, checking and acting on CRUD
 * @objectPathName = objectpath/objectname
 *  objectpath/objectname/conf.json
 *                       /idx/all_key1.json = {key1value:{object}}
 *                            lst_key1.json = [key1valA,key1val2,...]
 *                            key2_key1.json = {key2value:[key1value]}
 *                            all_key1_filtername = {key1value:{object}}
 *                       /itm/apxidvalue.json
 * in conf.json:
 * {
 *  "name": "object name ex:'nations'",
 *  "schema": "relative schema from dirapi dirtown ex:'adminapi/schema/nations.json'",
 *  "lastupdateschema": 0, time stamp of last schema update
 *  "lastupdatedata": 0, time stamp of last itm update
 * }
 *
 * Specific keys in schema for apXtrib:
 *  apxid : the field value to use to store an item
 *  apxuniquekey : list of fields that have to be unique; you cannot have 2 itm with the same key value
 *  apxidx : list of index files /idx/
 *    { "name":"lst_fieldA", "keyval": "alias" }, => lst_fieldA.json = [fieldAvalue1,...]
 *    { "name":"all_fieldA", "keyval": "fieldA" }, => all_fieldA.json =
 *        if fieldA in apxuniquekey = {fieldAvalue1:{object}}
 *        not in apxuniquekey = {fieldAvalue1:[{object}]}
 *    { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB"]}, => word_fieldA.json
 *        if fieldA in apxuniquekey = {fieldAvalue1:fieldBvalue,}
 *        if fieldA not in uniquekey = {fieldAvalue1: [fieldBv1,fieldBv2,]}
 *    { "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB","fieldC"]}, => word_fieldA.json
 *        if fieldA in apxuniquekey = {fieldAvalue1:{fieldB:val,fieldC:val},}
 *        if fieldA not in uniquekey = {fieldAvalue1: [{fieldB:val,fieldC:val},]}
 *  apxaccessrights : list of profils with CRUD accessrights
 *
 */
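
// An illustrative layout (not part of the commit) for a "pagans" object whose
// apxid is "alias", matching the description above:
// objectpath/pagans/conf.json:
//   { "name": "pagans", "schema": "adminapi/schema/pagans.json",
//     "lastupdateschema": 0, "lastupdatedata": 0 }
// objectpath/pagans/itm/fred.json:
//   { "alias": "fred", "publickey": "..." }
// objectpath/pagans/idx/lst_alias.json (from {"name":"lst_alias","keyval":"alias"}):
//   ["fred", ...]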

const Odmdb = {};

/**
 * @api syncObject
 * @param {string} url to an existing object conf (/objectname/conf.json)
 * @param {timestamp} timestamp
 *  0 => rebuild local object from all_{idapx}.json
 *  >0 => update itm and idx search by datetime
 */
Odmdb.syncObject = () => {};

/**
 * @api createObject: create a space to host an object
 *
 * @source {string} "new", url,
 * @schemapath {string} path to create or replace a schema ${schemaPath}/schema/
 * @objectPath {string} path where objects are stored
 * @objectName {string} name of the object
 * @schema {object} the json schema for this object
 * @lgjson {object} the json file for a specific language
 * @lg {string} the 2 letters language
 *
 * Setup a new object locally =>
 * source
 *  - from scratch =>
 *    Create
 *  - from a synchronization
 *    Download from source all_{apxid}.json
 *
 * a schema:
 *  schemaPath/schema/objectName.json
 *            /lg/objectName_{lg}.json
 * an object:
 *  objectPath/objectName/idx/conf.json = {"schema":"relativpathfile or http"}
 *                       /uniqueid.json defining schema
 */
Odmdb.createObject = (
  source,
  schemaPath,
  objectPath,
  objectName,
  schema,
  lgjson,
  lg
) => {
  if (!fs.existsSync(schemaPath)) {
    return {
      status: 404,
@@ -85,165 +130,99 @@ Odmdb.setObject = (schemaPath, objectPath, objectName, schema, lgjson, lg) => {
  return { status: 200 };
};

/**
 * Update an object
 * @param {string} objectPathname
 * @param {object} meta update request
 *  lg:
 *  lgobj: object to store in /lg/objectname_lg.json
 *  schema: an update schema
 * @return {status, ref:"Odmdb", msg:"", data}
 *
 * Create a tmp object env and check the existing object to identify any issues
 * If all is fine then apply the change by replacing
 */
Odmdb.updateObject = (objectPathname, meta) => {};

/**
 * Get a schema from objectPathname
 *
 * todo only local schema => plan a sync every 10 minutes
 * @schemaPath local path adminapi/schema/objectName.json or /tribename/schema/objectName
 * @validschema boolean whether to check the schema or not, mainly used when changing a schema
 * @return {status:200,data:{conf:"schemaconf",schema:"schemacontent"} }
 */
Odmdb.Schema = (objectPathname, validschema) => {
  const confschema = fs.readJsonSync(`${objectPathname}/conf.json`);
  let schemaPath = confschema.schema;
  const res = {
    status: 200,
    ref: "Odmdb",
    msg: "getschema",
    data: { conf: confschema },
  };
  // @todo load res.data.schema from schemaPath and validate it when
  // validschema is true (see Odmdb.getSchema below)
  return res;
};
//Odmdb.Checkjson = (objectPath, objectName, data, withschemacheck) => {
/*
  @objectPath path to the folder that contains /objects/objectName/ /lg/objectName_{lg}.json /schema/objectName.json
  @objectName name of object
  @data data to check based on schema objectName definition

  @return status:200 Data is consistent with schema and primarykey does not exist
          status:201 Data is consistent with schema and primarykey does already exist
          status:other means unconsistent schema:
              404: schema does not exist
          or unconsistent data and schema from Checkjson.js Checkjson.schema.data

*/
/* const res = { status: 200, ref: "Odmdb", msg: "", data: {} };
  //get schema link of object
  const schemaPath = fs.readJsonSync(
    `${objectPath}/${objectName}/idx/conf.json`
  )["schema"];
  if (schemaPath.substring(0, 4) == "http") {
    // run an http request to retrieve the schema
  } else {
    res.data.schema = Odmdb.schema(objectPath, objectName, withschemacheck);
  }
  // check schema validity in case withschemacheck
  if (schema.status != 200) return;
  console.log("SCHEMA for checking:");
  console.log(schema.data);
  console.log("DATA to check:");
  console.log(data);
  // withschemacheck at false, if check then it is done at Odmdb.schema
  const validate = Checkjson.schema.data(schema.data, data, false);
  if (validate.status != 200) {
    return validate;
  }
  if (
    schema.data.apxprimarykey &&
    data[k] &&
    fs.existsSync(`${objectPath}/${objectName}/${data[k]}.json}`)
  ) {
    res.status = 201; // means created => exist an object with this primary key
  }
  if (schema.data.apxuniquekey) {
    schema.data.apxuniquekey.forEach((k) => {
      if (
        data[k] &&
        fs.existsSync(
          `${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json}`
        ) &&
        fs.readJsonSync(
          `${objectPath}/${objectName}/searchindex/${objectName}_${k}_${schema.data.apxprimarykey}.json}`
        )[k]
      ) {
        res.status = 201; // means created => exist as primary key
      }
    });
  }
  return res;
};
*/
Odmdb.getSchema = async (schemaPath, validschema) => {
  /**
   * @schemaPath public http link or local path adminapi/schema/objectName.json or /tribename/schema/objectName
   * @return schema or {}
   */
  const res = { status: 200, data: { schema: {} } };
  if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
  if (schemaPath.substring(0, 4) == "http") {
    // run an http request to retrieve the schema with an await axios
  } else {
    if (schemaPath.substring(0, 9) == "adminapi/") {
      schemaPath = `${conf.dirapi}/${schemaPath}`;
    } else {
      schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
    }
    if (!fs.existsSync(schemaPath)) {
      return {
        status: 404,
        ref: "Odmdb",
        msg: "schemanotfound",
        data: { schemaPath, schema: {} },
      };
    }
    res.data.schema = fs.readJsonSync(schemaPath);
    if (!res.data.schema.apxid) {
      return {
        status: 406,
        ref: "Odmdb",
        msg: "missingprimarykey",
        data: {},
      };
    }
    if (res.data.schema.apxidx) {
      //add apxid to apxuniquekey in case it is not there
      if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
        res.data.schema.apxuniquekey.push(res.data.schema.apxid);
      }
      res.data.schema.apxidx.forEach((idx) => {
        if (
          idx.objkey &&
          !res.data.schema.apxuniquekey.includes(idx.keyval) &&
          !idx.objkey.includes(res.data.schema.apxid)
        ) {
          return {
            status: 406,
            ref: "Odmdb",
            msg: "unconsistencyapxidx",
            data: {
              name: idx.name,
              keyval: idx.keyval,
              objkey: idx.objkey,
              apxid: res.data.schema.apxid,
            },
          };
        }
      });
    }
    if (validschema || 1 == 1) {
      // return {status:200, ref, msg} or {status!:200,multimsg:[{ref,msg,data}]}
      const check = Checkjson.schema.validation(res.data.schema);
      if (check.status != 200) {
        res.multimsg = check.multimsg;
        res.status = check.status;
      }
    }
  }
  return res;
};
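
// A minimal usage sketch of Odmdb.getSchema (illustrative, not part of the
// commit); "adminapi/schema/pagans" resolves under conf.dirapi as shown above:
// const getschema = await Odmdb.getSchema("adminapi/schema/pagans", true);
// if (getschema.status == 200) {
//   console.log(getschema.data.schema.apxid); // primary key field, e.g. "alias"
// }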
Odmdb.search = (objectPath, objectName, search) => {
  /*
  @search= {
@@ -263,96 +242,367 @@ Odmdb.search = (objectPath, objectName, search) => {
  const schema = Odmdb.schema(objectPath, objectName);
  if (schema.status != 200) return schema;
};

/**
 * To get an array of items (itm) per primarykey with a list of fields
 * Objects are stored in objectPathname; /conf.json indicates where the schema is
 *
 * @objectPathname where objects are stored
 * @apxidlist list of apxid values requested
 * @role {xalias,xprofils} allows to get accessright, comes from header
 * @propertiesfilter (optional) keys to return for each object (if undefined then return all)
 * @Return {status:200; data:{id:{data filtered by @propertiesfilter},id:"notfound"}}
 */
Odmdb.reads = (objectPathname, apxidlist, role, propertiesfilter) => {
  const res = { status: 200, data: {} };
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;
  // Test if readable at least as owner
  role.xprofils.push("owner");
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  if (!accessright.R) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "accessforbidden",
      data: { crud: "R", accessright },
    };
  }
  apxidlist.forEach((id) => {
    if (fs.existsSync(`${objectPathname}/itm/${id}.json`)) {
      const objectdata = fs.readJsonSync(`${objectPathname}/itm/${id}.json`);
      if (objectdata.owner && objectdata.owner == role.xalias) {
        if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
      } else {
        if (role.xprofils.includes("owner"))
          role.xprofils = role.xprofils.filter((e) => e !== "owner");
      }
      const accessright = Odmdb.accessright(
        getschema.data.schema.apxaccessrights,
        role
      );
      if (!accessright.R) {
        res.data[id] = "forbidden";
      } else {
        let newpropertiesfilter = Object.keys(objectdata);
        if (accessright.R.length > 0) {
          const setaccess = new Set(accessright.R);
          if (!propertiesfilter) propertiesfilter = Object.keys(objectdata);
          newpropertiesfilter = propertiesfilter.filter((f) =>
            setaccess.has(f)
          );
        }
        const objinfo = {};
        newpropertiesfilter.forEach((k) => {
          if (objectdata[k]) objinfo[k] = objectdata[k];
        });
        res.data[id] = objinfo;
      }
    } else {
      res.data[id] = "notfound";
    }
  });
  return res;
};
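
// A minimal usage sketch of Odmdb.reads (illustrative, not part of the
// commit); role comes from the authenticated header and "mytribe" is a
// placeholder:
// const role = { xalias: "fred", xprofils: ["pagans"] };
// Odmdb.reads(`${conf.dirtown}/tribes/mytribe/persons`, ["fred"], role, [
//   "alias",
//   "profils",
// ]);
// // => { status: 200, data: { fred: { alias: "fred", profils: [...] } } }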

/**
 * Convert profils into accessright
 * @param {*} apxaccessrights from schema object {profilname:{C:[],R:[],U:[],D:[]}}
 * @param {*} role {xprofils,xalias} accessible after isAuthenticated
 * @returns access right to C create if present, to R read (properties list or all if empty), to U update (properties list or all if empty), to D elete
 * example: {"C":[],"R":[properties list],"U":[properties list],"D":[]}
 */
Odmdb.accessright = (apxaccessrights, role) => {
  const accessright = {};
  role.xprofils.forEach((p) => {
    if (apxaccessrights[p]) {
      Object.keys(apxaccessrights[p]).forEach((act) => {
        if (!accessright[act]) {
          accessright[act] = apxaccessrights[p][act];
        } else {
          accessright[act] = [
            ...new Set([...accessright[act], ...apxaccessrights[p][act]]),
          ];
        }
      });
    }
  });
  return accessright;
};
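
// A minimal worked example (illustrative, not part of the commit):
// Odmdb.accessright(
//   { pagans: { R: ["alias"] }, owner: { R: ["alias"], U: ["recovery"] } },
//   { xalias: "fred", xprofils: ["pagans", "owner"] }
// );
// // => { R: ["alias"], U: ["recovery"] }  (per-action union across profils)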

/**
 * CUD a data itm into objectPathname if checkJson is valid
 * and update idx
 * idx is up to date for unique properties but not for lists
 * @param {string} objectPathname folder name where objects are stored
 * @param {object} itm an object respecting the checkJson schema in objectPathname/conf.json
 * @param {string} crud C reate U pdate D elete
 * @param {array} role {xprofils,xalias} xprofils list of profils like anonymous, pagans, person; owner is deduced if the object property owner is alias
 */
Odmdb.cud = (objectPathname, crud, itm, role) => {
  const getschema = Odmdb.Schema(objectPathname, true);
  if (getschema.status != 200) return getschema;

  if (!itm[getschema.data.schema.apxid]) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "apxidmissing",
      data: { missingkey: getschema.data.schema.apxid },
    };
  }
  const existid = fs
    .readJSONSync(
      `${objectPathname}/idx/lst_${getschema.data.schema.apxid}.json`
    )
    .includes(itm[getschema.data.schema.apxid]);
  if (existid && crud == "C") {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "alreadyexist",
      data: {
        objectname: path.basename(objectPathname),
        key: getschema.data.schema.apxid,
        val: itm[getschema.data.schema.apxid],
      },
    };
  }
  if (!existid && ["U", "D"].includes(crud)) {
    return {
      status: 406,
      ref: "Odmdb",
      msg: "doesnotexist",
      data: {
        objectname: path.basename(objectPathname),
        key: getschema.data.schema.apxid,
        val: itm[getschema.data.schema.apxid],
      },
    };
  }
  const itmold = existid
    ? fs.readJSONSync(
        `${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
      )
    : {};
  if (existid && itmold.owner && itmold.owner == role.xalias) {
    role.xprofils.push("owner");
  } else {
    // set owner cause this is a Create
    itm.owner = role.xalias;
  }
  //get accessright {C:[],R:[],U:[],D:[]}; if a key exists it means authorized, if the array contains properties (for R and U) the right is only allowed on those properties
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
  console.log("accessright", accessright);
  if (
    (crud == "C" && !accessright.C) ||
    (crud == "D" && !accessright.D) ||
    (crud == "U" && !accessright.U)
  ) {
    return {
      status: 403,
      ref: "Odmdb",
      msg: "accessforbidden",
      data: { crud, accessright },
    };
  }
  //delete or save
  // chkdata is hoisted so the final return also works for a D elete (returns the deleted itm)
  let chkdata = {
    data: { itm: itmold, apxid: itm[getschema.data.schema.apxid] },
  };
  if (crud == "D") {
    itmold["dt_delete"] = dayjs();
    fs.outputJSONSync(
      `${objectPathname}/delitm/${itmold[getschema.data.schema.apxid]}.json`,
      itmold
    );
    fs.rmSync(
      `${objectPathname}/itm/${itmold[getschema.data.schema.apxid]}.json`
    );
  } else {
    // if Create Update erase old version
    let itmtostore = itm;
    if (crud == "U" && accessright.U.length > 0) {
      itmtostore = itmold;
      accessright.U.forEach((p) => {
        itmtostore[p] = itm[p];
      });
      itmtostore.dt_update = dayjs();
    }
    if (crud == "C") itmtostore.dt_create = dayjs();
    // check consistency of datatostore
    chkdata = Checkjson.schema.data(getschema.data.schema, itmtostore, false);
    if (chkdata.status != 200) return chkdata;
    if (!getschema.data.schema.apxuniquekey)
      getschema.data.schema.apxuniquekey = [];

    fs.outputJSONSync(
      `${objectPathname}/itm/${chkdata.data.apxid}.json`,
      chkdata.data.itm
    );
  }
  console.log("getschema", getschema);
  //update idx
  Odmdb.idxfromitm(
    objectPathname,
    crud,
    itm,
    itmold,
    [],
    getschema.data.schema
  );
  getschema.data.conf.lastupdatedata = dayjs();
  fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
  return {
    status: 200,
    ref: "Odmdb",
    msg: "cudsuccessfull",
    data: { itm: chkdata.data.itm },
  };
};
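
// A minimal usage sketch of Odmdb.cud (illustrative, not part of the commit),
// creating a pagan item; the pagans schema declares apxid: "alias":
// const role = { xalias: "fred", xprofils: ["pagans"] };
// Odmdb.cud(
//   `${conf.dirapi}/nationchains/pagans`,
//   "C",
//   { alias: "fred", publickey: "..." },
//   role
// );
// // => { status: 200, ref: "Odmdb", msg: "cudsuccessfull", data: { itm: {...} } }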
/**
 * create/update idx from itm(s)
 *
 * @param {string} objectPathname
 * @param {object} itm item to Create or to Update, or {} if crud == I or crud == D
 * @param {object} itmold (current item) if crud == U or D to get the previous itm before change, or {} if crud == I or C
 * @param {letter} crud CUDI C add, U update, D delete, I reindex
 * @param {array} idxs if specific request to rebuild a list of idx; if [] then use the schema ones
 * @param {object} schema if empty it uses the schema from Odmdb.Schema().data.schema
 *
 * example create alias 12 name fred:
 *  Odmdb.idxfromitm('.../tribes/ndda/persons',"C",{alias:'12',name:"fred"},{},[], {person schema})
 * example update alias 12 with name freddy:
 *  Odmdb.idxfromitm('.../tribes/ndda/persons',"U",{alias:'12',name:"freddy"},{alias:'12',name:"fred"},[], {person schema})
 * example delete alias 12:
 *  Odmdb.idxfromitm('.../tribes/ndda/persons',"D",{},{alias:'12',name:"fred"},[], {person schema})
 * example to rebuild all index from scratch:
 *  Odmdb.idxfromitm('.../tribes/ndda/persons',"I",{},{},[], {person schema})
 * example to rebuild only the publickey_alias index from scratch:
 *  Odmdb.idxfromitm('.../tribes/ndda/pagans',"I",{},{},[{ name:"publickey_alias",keyval:"publickey",objkey:["alias"]}], {pagans schema})
 *
 */
Odmdb.idxfromitm = (objectPathname, crud, itm, itmold, idxs = [], schema) => {
  console.log(`idxfromitem for ${objectPathname} action:${crud}`);
  if (!schema || !schema.apxid) {
    const getschema = Odmdb.Schema(objectPathname, true);
    if (getschema.status != 200) return getschema;
    schema = getschema.data.schema;
  }
  console.log(schema.apxuniquekey);
  const itms = crud == "I" ? glob.sync(`${objectPathname}/itm/*.json`) : [itm];
  console.log(itms);
  if (crud == "I") {
    //reinit all idx
    idxs.forEach((idx) => {
      fs.remove(`${objectPathname}/idx/${idx.name}.json`);
    });
  }
  let idxtoreindex = []; //store index that has to be reprocessed to get the full context
  idxs = idxs.length == 0 ? schema.apxidx : idxs; // get all index if none
  itms.forEach((i) => {
    if (crud == "I") {
      itm = fs.readJSONSync(i);
    }
    //console.log(itm);
    idxs.forEach((idx) => {
      const keyvalisunique = schema.apxuniquekey.includes(idx.keyval); // check if keyval is unique, meaning stored as an object (or string), else stored as an array
      const idxsrc = `${objectPathname}/idx/${idx.name}.json`;
      const idxinit = idx.name.substring(0, 4) == "lst_" ? [] : {}; // select type of idx (array or object)
      let idxfile = !fs.existsSync(idxsrc) ? idxinit : fs.readJSONSync(idxsrc);
      if (idx.name.substring(0, 4) == "lst_") {
        if (["D", "U"].includes(crud)) {
          if (keyvalisunique) {
            idxfile = idxfile.filter((e) => e !== itmold[idx.keyval]);
          } else {
            idxtoreindex.push(idx); //@todo
          }
        }
        console.log(idx.keyval);
        console.log(itm[idx.keyval]);

        if (
          ["C", "U", "I"].includes(crud) &&
          !idxfile.includes(itm[idx.keyval])
        ) {
          idxfile.push(itm[idx.keyval]);
        }
      } else {
        if (!idx.objkey) {
          //means all properties
          idx.objkey = Object.keys(schema.properties);
        }
        if (keyvalisunique && idx.objkey.length == 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            idxfile[itm[idx.keyval]] = itm[idx.objkey[0]];
          }
        }
        if (keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud)) {
            delete idxfile[itmold[idx.keyval]];
          } else {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            idxfile[itm[idx.keyval]] = itmfilter;
          }
        }
        if (!keyvalisunique && idx.objkey.length == 1) {
          if (
            ["D", "U"].includes(crud) &&
            idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]) > -1
          ) {
            // U because need to remove the previous value before adding it
            idxfile[itmold[idx.keyval]].splice(
              idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]),
              1
            );
          }
          if (["C", "U", "I"].includes(crud)) {
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            if (!idxfile[itm[idx.keyval]].includes(itm[idx.objkey[0]])) {
              idxfile[itm[idx.keyval]].push(itm[idx.objkey[0]]);
            }
          }
        }
        if (!keyvalisunique && idx.objkey.length > 1) {
          if (["D", "U"].includes(crud) && idxfile[itmold[idx.keyval]]) {
            // U because need to remove the previous value before adding it
            let arrayofit = [];
            idxfile[itmold[idx.keyval]].forEach((it) => {
              if (it[schema.apxid] != itm[schema.apxid]) arrayofit.push(it);
            });
            idxfile[itmold[idx.keyval]] = arrayofit;
          }
          if (["C", "U", "I"].includes(crud)) {
            const itmfilter = {};
            idx.objkey.forEach((i) => {
              if (itm[i]) itmfilter[i] = itm[i];
            });
            if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
            idxfile[itm[idx.keyval]].push(itmfilter);
          }
        }
      }
      fs.outputJSONSync(idxsrc, idxfile);
    });
  });
  if (crud != "I") {
    //update lastupdatedata to inform that something changed
    const confschema = fs.readJSONSync(`${objectPathname}/conf.json`);
    confschema.lastupdatedata = dayjs();
    fs.outputJSONSync(`${objectPathname}/conf.json`, confschema);
  }
  return { status: 200, ref: "Odmdb", msg: "successreindex", data: {} };
};
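
// An illustrative trace (not part of the commit), assuming a persons schema
// with apxid "alias", "alias" in apxuniquekey and apxidx
// [{name:"lst_alias",keyval:"alias"},{name:"all_alias",keyval:"alias"}]:
// Odmdb.idxfromitm(".../persons", "C", { alias: "12", name: "fred" }, {}, [], schema);
// // idx/lst_alias.json => ["12"]
// // idx/all_alias.json => { "12": { alias: "12", name: "fred" } }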
Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
  /**
   * Update all itm of objectname from index idx/idxname with data

@@ -4,6 +4,8 @@ const dayjs = require("dayjs");
const fs = require("fs-extra");
const axios = require("axios");
const openpgp = require("openpgp");
const Notifications = require("../models/Notifications.js");
const Odmdb = require("../models/Odmdb.js");

/*if (fs.existsSync("../../nationchains/tribes/conf.json")) {
  conf = require("../../nationchains/tribes/conf.json");
@@ -16,23 +18,42 @@ const conf = require(`${process.env.dirtown}/conf.json`);
 *
 *
 */

const Pagans = {};

/**
 * Remove authentication token after a logout
 * @param {string} alias
 * @param {string} tribe
 * @param {integer} xdays
 * @param {string} xhash
 * @returns {status:200, ref:"Pagans", msg:"logout"}
 * the tmpfs file name has to be in line with the tmpfs created by isAuthenticated
 * tmpfs contains the profils' names for a tribe
 */
Pagans.logout = (alias, tribe, xdays, xhash) => {
  //console.log(alias, tribe, xdays, xhash);
  // inline with middleware isAuthenticated.js
  let tmpfs = `${process.env.dirtown}/tmp/tokens/${alias}_${tribe}_${xdays}`;
  //max filename in ext4: 255 characters
  tmpfs += `_${xhash.substring(150, 150 + tmpfs.length - 249)}.json`;
  fs.remove(tmpfs);
  return { status: 200, ref: "Pagans", msg: "logout" };
};

/**
 * @param {string} alias an alias that exists or not
 * @return {object} { status: 200, ref:"Pagans", msg:"aliasexist", data: { alias, publicKey } }
 *                  { status: 404, ref:"Pagans", msg:"aliasdoesnotexist", data: { alias } }
 *
 **/
Pagans.getalias = (alias) => {
  console.log(`${conf.dirapi}/nationchains/pagans/itm/${alias}.json`);
  //bypass Odmdb cause all is public
  if (fs.existsSync(`${conf.dirapi}/nationchains/pagans/itm/${alias}.json`)) {
    return {
      status: 200,
      ref: "Pagans",
      msg: "aliasexist",
      data: fs.readJSONSync(
        `${conf.dirapi}/nationchains/pagans/itm/${alias}.json`
      ),
    };
@@ -46,52 +67,56 @@ Pagans.getalias = (alias) => {
  }
};

/**
 * @param {string} tribeid that exists with a person alias
 * @param {string} alias that exists
 * @param {object} role {xalias,xprofils} requester
 * @return {object} { status: 200, ref:"Pagans", msg:"personexist", data: { person } }
 *                  { status: 404, ref:"Pagans", msg:"persondoesnotexist", data: { alias, tribeid } }
 *
 **/
Pagans.getperson = (tribeid, alias, role) => {
  const objlst = Odmdb.reads(
    `${conf.dirtown}/tribes/${tribeid}/persons`,
    [alias],
    role
  );
  if (objlst.data[alias] == "notfound") {
    return {
      status: 404,
      ref: "Pagans",
      msg: "persondoesnotexist",
      data: { alias, tribeid },
    };
  } else {
    return {
      status: 200,
      ref: "Pagans",
      msg: "personexist",
      data: objlst.data[alias],
    };
  }
};

/**
 * @param {object} objpagan {alias,publickey} a unique alias/publickey that identifies an identity
 * @param {array} role {xalias,xprofils} requester and list of profils
 * @return {object} { status: 200, data: { alias, publicKey } }
 * xhash was checked by isauthenticated
 * @todo use Odmdb to add a pagan
 */
Pagans.create = (objpagan, role) => {
  return Odmdb.cud(`${conf.dirapi}/nationchains/pagans`, "C", objpagan, role);
  /*
  let apxpagans = {};
  if (fs.existsSync(`${conf.dirapi}/nationchains/pagans/idx/alias_all.json`)) {
    apxpagans = fs.readJsonSync(
      `${conf.dirapi}/nationchains/pagans/idx/alias_all.json`
    );
  }
  if (apxpagans[objpagan.alias]) {
    return { status: 409, ref: "Pagans", msg: "aliasexist", data: { alias } };
  }
  apxpagans[objpagan.alias] = { alias, publicKey };
  fs.outputJsonSync(
    `${conf.dirapi}/nationchains/pagans/idx/alias_all.json`,
    apxpagans
@@ -100,52 +125,148 @@ Pagans.create = (alias, publicKey) => {
    alias,
    publicKey,
  });
  return {
    status: 200,
    ref: "Pagans",
    msg: "identitycreate",
    data: { alias, publicKey },
  };
  */
};
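
// A minimal usage sketch (illustrative, not part of the commit):
// Pagans.create(
//   { alias: "fred", publickey: "-----BEGIN PGP PUBLIC KEY BLOCK-----..." },
//   { xalias: "fred", xprofils: ["pagans"] }
// );
// // delegates to Odmdb.cud and returns its { status, ref, msg, data } result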

/**
 * @Param {string} tribeid tribe id in this town
 * @Param {string} alias pagan unique id
 * @Param {object} personupdate data that respects /nationchains/schema/person.json + nationchains/tribe/tribeid/schema/personextented.json
 * @Param {object} role {xalias,xprofils} requester
 * @return create or update a person /tribe/tribeid/person/alias.json
 * todo later use Odmdb and schema person to manage this
 */
Pagans.personupdate = (tribeid, alias, personupdate, role) => {
  const personinit = {
    alias: alias,
    dt_create: dayjs(),
    accessrights: { profil: "user" },
    profils: ["person"],
  };
  const personfile = `${process.env.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`;
  const persondata = fs.existsSync(personfile)
    ? fs.readJSONSync(personfile)
    : personinit;
  persondata.dt_update = dayjs();

  Object.keys(personupdate).forEach((d) => {
    persondata[d] = personupdate[d];
  });
  //const checkjson = Checkjson.schema.data(fs.readJsonSync(`${conf.dirapi}/nationchains/schema/person.json`), persondata, false)
  // if checkjson.status==200 create/update with odmdb to update index data
  // see odmdb that does all and returns a standard message
  fs.outputJSONSync(personfile, persondata, { space: 2 });
  return {
    status: 200,
    ref: "Pagans",
    msg: "successfullupdate",
    data: { alias: alias, tribeid: tribeid },
  };
};
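
// A minimal usage sketch (illustrative, not part of the commit); "mytribe" is
// a placeholder tribe id:
// Pagans.personupdate("mytribe", "fred", { firstname: "Fred" }, role);
// // => { status: 200, ref: "Pagans", msg: "successfullupdate",
// //      data: { alias: "fred", tribeid: "mytribe" } }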
/**
 * Send an email with the alias's keys to email, or to the person's recoveryauth.email
 *
 * If email or publickey is undefined then get data from tribe/person(alias)
 * Send email with keys
 *
 * @param {string} alias
 * @param {pgpPrivate} privatekey
 * @param {string} tribeid
 * @param {string} passphrase
 * @param {pgpPublic} publickey
 * @param {string} email
 */
Pagans.sendmailkey = (
  alias,
  privatekey,
  tribeid,
  passphrase,
  publickey,
  email
) => {
  const person = { alias, privatekey, tribeid };
  console.log(
    alias, "-", privatekey, "-", tribeid, "-", passphrase, "-", publickey, "-", email
  );
  if (!publickey || !email || !passphrase || !privatekey) {
    const personfile = `${process.env.dirtown}/tribes/${tribeid}/person/itm/${alias}.json`;
    const persondata = fs.existsSync(personfile)
      ? fs.readJsonSync(personfile)
      : {};
    if (Object.keys(persondata).length == 0) {
      return {
        status: 404,
        ref: "Pagans",
        msg: "persondoesnotexist",
        data: { alias, tribeid },
      };
    }
    person.email = persondata.recoveryauth.email;
    person.publickey = persondata.recoveryauth.publickey;
    person.privatekey = persondata.recoveryauth.privatekey;
    person.passphrase = persondata.recoveryauth.passphrase;
  } else {
    person.email = email;
    person.passphrase = passphrase;
    person.publickey = publickey;
  }
  console.log("person:", person);

  //feedback.withemail = true;
  //feedback.email = email;
  //feedback.privatekey = privatekey;
  //feedback.passphrase = passphrase;
  const mailidentity = {
    subjecttpl: "Information pour l'alias: {{alias}}",
    htmltpl:
      "<h1>Votre identité {{alias}} via {{tribeid}}</h1><p>Passphrase:</p><p>{{{passphrase}}}</p><p>Cle public:</p><p>{{{publickey}}}</p><p>Cle privée</p><p>{{{privatekey}}}</p>",
    texttpl:
      "Votre identité {{alias}}\nPassphrase:\n{{{passphrase}}}\nCle public:\n{{{publickey}}}\nCle privée\n{{{privatekey}}}",
    filelist: [],
  };

  const maildata = {
    To: person.email,
    subject: Mustache.render(mailidentity.subjecttpl, person),
    htmlpart: Mustache.render(mailidentity.htmltpl, person),
    textpart: Mustache.render(mailidentity.texttpl, person),
    filelist: [],
  };
  fs.outputFileSync(
    `${conf.dirtown}/tmp/${person.alias}_privatekey.txt`,
    person.privatekey,
    "utf8"
  );
  maildata.filelist.push({
    filename: `${person.alias}_privatekey.txt`,
    pathfile: `${conf.dirtown}/tmp/${person.alias}_privatekey.txt`,
  });
  fs.outputFileSync(
    `${conf.dirtown}/tmp/${person.alias}_publickey.txt`,
    person.publickey,
    "utf8"
  );
  maildata.filelist.push({
    filename: `${person.alias}_publickey.txt`,
    pathfile: `${conf.dirtown}/tmp/${person.alias}_publickey.txt`,
  });
  //fs.readJSONSync('${conf.dirapi}/api/')
  return Notifications.sendmail(maildata, tribeid);
};

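A usage sketch (hypothetical alias and tribe; passing undefined key material triggers the recoveryauth lookup above):

// Reads tribes/smatchit/person/itm/toto.json -> recoveryauth, then mails both keys:
Pagans.sendmailkey("toto", undefined, "smatchit", undefined, undefined, undefined);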
Pagans.authenticatedetachedSignature = async (
  alias,
@@ -185,6 +306,15 @@ Pagans.authenticatedetachedSignature = async (
  }
};

/**
 * todo: retrieve all the tokens and distribute them to the town
 * @param {string} alias
 */
Pagans.deletealias = (alias) => {
  // check if alias is used in the blockchain
  // if not then delete itm pagan alias => this means that alias is now available for someone else
};
Pagans.deleteperson = (alias, tribeId) => {};
Pagans.keyrecovery = (tribeid, email) => {
  glob
    .GlobSync(`${conf.dirtown}/tribes/${tribeid}/Person/*.json`)
57
api/models/Trackings.js
Normal file
@@ -0,0 +1,57 @@
/**
 * Tracking management:
 *
 * without header:
 * https://dns.xx/trk/pathtofile?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&src=btnregister&version=1&lg=fr
 *
 * with header
 * https://dns.xx/trk/pathtofile?src=btnregister&version=1
 *
 * where pathtofile is a resource accessible from https://dns.xx/pathtofile
 *
 * We get:
 * alias: if authenticated from header, else anonymous
 * uuid: a uuid v4 generated the first time a web page is opened in a browser
 * src: the source action that triggered this get
 * version: can be an int, a date or any version of the src
 * tm: optional, a timestamp of the action when it is not immediate (offline app)
 *
 * html usage to track a loading page, or an email when a picture is loaded
 * using apxwebapp in /src/ we got:
 * <img src="static/img/photo.jpg" data-trksrckey="loadpage" data-version="1">
 *
 * using html + apx.js (or at least with header {xalias,xuuid,xlang})
 * <img lazysrc="trk/static/img/photo.jpg" data-trksrckey="loadpage" data-version="1">
 *
 *
 * in js action:
 * <button></button> or
 * <a data-trksrc="linktoblabla" href='https:..'
 *    onclick="apx.trackvisit('btnaction',1);actionfct();">
 * </a>
 * will hit an eventlistener
 * axios.get("https://dns.xx/trk/cdn/empty.json?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&src=btnregister&version=1");
 *
 *
 * or if no js is available (example: email or pdf document)
 * <img src="https://dns.xx/trk/static/img/photo.jpg?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&srckey=loadpage&version=1" will hit a tracker
 *
 * <a href="https://dns.xx/trk/redirect?alias=anonymous&uuid=1b506f71-1bff-416c-9057-cb8b86296f60&srckey=loadpage&version=1&url=http://..." will hit a tracker then redirect to url></a>
 *
 *
 * if you use apx.js:
 * in html add in <button>, <img>, <a> tag data-trksrc="srckey"
 * <img src="https://dns.xx/static/img/photo.jpg" data-trkversion="1" data-trksrckey="registerform">
 * <button data-trksrc="https://dns.xx/static/img/photo.jpg" data-trkversion="1" data-trksrckey="registerform">
 * in js call apx.track(srckey);
 *
 * Tracking logs into tribe/logs/nginx/tribe_appname.trk.log
 * Src keys have to be managed in tribe/api/models/lg/src_en.json
 * {"srckey":{
 *    "app":"presentation|app|apptest",
 *    "title":"",
 *    "description":""
 *   }
 * }
 *
 */
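To make the protocol above concrete, a minimal browser-side sketch of a tracking hit (the function body and the localStorage persistence are assumptions, not apx.js code; only the query parameters come from the comment above):

// Hypothetical sketch: fire a tracking hit as documented above.
function trackvisit(srckey, version) {
  let uuid = localStorage.getItem("trkuuid");
  if (!uuid) {
    uuid = crypto.randomUUID(); // uuid v4 generated the first time on this browser
    localStorage.setItem("trkuuid", uuid);
  }
  const qs = `alias=anonymous&uuid=${uuid}&src=${srckey}&version=${version}`;
  fetch(`https://dns.xx/trk/cdn/empty.json?${qs}`); // the doc's example uses axios.get
}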
@@ -15,6 +15,33 @@ const Checkjson = require( `./Checkjson.js`);
/*
tribeid manager

@TODO @STUDY

To add a tribe in dirtown/tribes with a mayor phil
see man adduser and the skelet reference directory used to set an env for apxtrib in /home/tribename/
accessible by tribename/password
then add the tribename group to phil to allow phil to create a symlink /dirtown/tribes/tribename => /home/tribename

At each reboot run a process to analyse /api/routes and api/models where only safe js can be executed (it only writes data into /home/tribename, never outside)

1 - Create a user in linux with $ sudo useradd smatchit
2 - this creates a user:group and a folder smatchit in /home/phil/dirtown/tribes/
3 - add the group smatchit to phil to allow phil to access files with a group accessright
4 - set a password if needed "$ sudo passwd smatchit" (sm@tchit) to make smatchit available from ssh on port 22
5 - to delete a user: sudo userdel smatchit (this keeps the folder smatchit; to also remove the folder => sudo userdel --remove smatchit)
A Node sketch of these steps is given after this hunk.

/tribes/tribeid
Manage a tribeid space
 * create
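The Node sketch of the numbered steps above (assumes root privileges; smatchit/phil are the doc's example names; not part of this commit):

// Hypothetical automation of the useradd/group/symlink steps:
const { execSync } = require("child_process");
const fs = require("fs-extra");
function addtribe(tribename, mayor, dirtown) {
  execSync(`useradd --create-home ${tribename}`); // step 1: user:group + home folder
  execSync(`usermod -aG ${tribename} ${mayor}`); // step 3: give the mayor group access
  // symlink dirtown/tribes/tribename => /home/tribename
  fs.ensureSymlinkSync(`/home/${tribename}`, `${dirtown}/tribes/${tribename}`);
}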
@@ -1,4 +1,5 @@
{
  "validcheck":"Your data are valid",
  "typedoesnnotexistinschema":"This type in your property is not managed by Checkjson.js",
  "dataerrpropertie":"Check your data: it does not fit your schema property rules",
  "dataerrpropertiesrequired":"This property is required and not present in your data"
6
api/models/lg/Checkjson_fr.json
Normal file
@@ -0,0 +1,6 @@
{
  "validcheck":"Your data are valid",
  "typedoesnnotexistinschema":"This type in your property is not managed by Checkjson.js",
  "dataerrpropertie":"Check your data: it does not fit your schema property rules",
  "dataerrpropertiesrequired":"This property is required and not present in your data"
}
9
api/models/lg/Notifications_fr.json
Normal file
@@ -0,0 +1,9 @@
{
  "missingconf":"Il manque un smtp/sms valide pour {{tribe}} ou sur le serveur /conf.json",
  "missingdata":"Il manque des données obligatoires dans data {{#missingk}} {{.}} {{/missingk}}",
  "missingfile":"Le ou les fichiers suivants n'existent pas {{#missingfile}} {{.}} {{/missingfile}}",
  "errsendmail":"Une erreur s'est produite lors de l'envoi de l'email",
  "successfullsentemail":"Email correctement envoyé",
  "errsendsms":"Une erreur s'est produite lors de l'envoi du sms",
  "successfullsentsms":"Sms bien envoyé à {{To}}"
}
13
api/models/lg/Odmdb_fr.json
Normal file
@@ -0,0 +1,13 @@
{
  "alreadyexist": "Un objet {{objectname}} avec la clé {{key}} existe déjà avec {{val}}",
  "doesnotexist": "L'objet {{objectname}} avec la clé {{key}} n'existe pas avec {{val}}",
  "getschema": "Schema {{{conf.name}}}",
  "schemanotfound": "Schema introuvable dans {{{schemaPath}}}",
  "pathnamedoesnotexist": "Le répertoire n'existe pas {{{indexpath}}}",
  "objectfiledoesnotexist": "Le fichier n'existe pas {{{objectpath}}}",
  "cudsuccessfull": "Mise à jour effectuée avec succès",
  "misssingprimarykey": "Il manque une clé primaire apxid pour stocker et identifier les objets",
  "unconsistencyapxidx": "L'index {{name}} doit contenir en objkey au moins {{apxid}} car keyval n'est pas unique",
  "profilnotallow": "Vous n'avez pas le profil de {{profils}}, cette action n'est pas autorisée",
  "successreindex": "Objet réindexé à partir des items, vos index sont à jour"
}
@@ -1,5 +1,8 @@
{
  "successfullcreate": "Alias creation for {{alias}} successful. {{#withemail}} An email was sent to {{email}}; if you do not receive it, please download your keys before leaving this page.{{/withemail}}",
  "successfulluppdate": "Your alias as a Person is now updated into {{tribe}}",
  "tribedoesnotexist": "Your tribe {{tribe}} does not exist in this town"
  "aliasexist":"This alias {{alias}} exists",
  "aliasdoesnotexist":"This alias {{alias}} does not exist",
  "personexist":"This person {{alias}} exists for {{tribeid}}",
  "successfullcreate": "This identity {{alias}} creation was successful. {{#withemail}} An email was sent to {{email}}; if you do not receive it, please download your keys before leaving this page.{{/withemail}}",
  "successfulluppdate": "Your alias as a Person is now updated into {{tribeid}}",
  "tribedoesnotexist": "Your tribe {{tribeid}} does not exist in this town"
}

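These strings are Mustache templates; a rendering sketch (the require path is an assumption):

const Mustache = require("mustache");
const msg = require("./lg/Pagans_en.json"); // hypothetical path to the strings above
// the {{#withemail}}...{{/withemail}} section only renders when withemail is truthy:
Mustache.render(msg.successfullcreate, { alias: "toto", withemail: true, email: "toto@example.com" });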
@@ -1,41 +1,13 @@
{
  "ERRcritical": "Erreur critique",
  "loginAlreadyExist": "Ce login existe déjà",
  "emailAlreadyExist":"Cet email existe déjà",
  "failtoWritefs":"Impossible d'écrire sur le serveur",
  "successfullCreate": "Création réussie",
  "successfullDelete": "Mise à jour effectuée",
  "serverNeedAuthentification":"Ce serveur a besoin d'une authentification",
  "forbiddenAccess":"Accès interdit",
  "userNotAllowtoCreate":"Pas d'autorisation de création",
  "userNotAllowtoUpdate":"Pas d'autorisation de mise à jour",
  "userNotAllowtoDelet":"Pas d'autorisation de suppression",
  "uuidNotFound":"Le païen {{uuid}} n'existe pas dans la tribu {{tribeName}}",
  "useremailNotfound":"Email introuvable",
  "loginDoesNotExist":"Login introuvable",
  "checkCredentials":"Vérifiez vos paramètres d'accès",
  "wrongPassword":"Vérifiez votre mot de passe",
  "invalidData":"Vérifiez vos données",
  "pswToosimple":"Votre mot de passe est trop simple, il doit contenir au moins 8 caractères avec des lettres majuscules, minuscules, des nombres et au moins un caractère spécial @! ...",
  "ERRemail":"Vérifiez votre email",
  "ERRnewnewbisdiff":"Les 2 mots de passe ne sont pas identiques",
  "uuiddesc":"Identifiant",
  "uuiddesclong":"Identifiant unique au format UUID.v4()",
  "uuidinfo":"<p>L'usage d'UUID v4 permet de générer un code unique sans centralisation, car il est basé sur un timestamp et une clé crypto, ce qui donne un code du type 7d8291c0-e137-11e8-9f7b-1dc8e57bed33</p>",
  "logindesc":"login",
  "logininfo":"<p>Le login doit être unique sur une instance d'apxtrib.</p><p>Pour échanger en dehors d'une instance apxtrib on utilise la clé publique du user ou, pour un humain, login@trib.town.nation.xx avec le nom du domaine qui héberge l'instance</p><p>Ou encore login@domain.xx, tout domain.xx utilisé pour héberger un espace web client /tribeid/www/</p>",
  "biographydesc":"Vous en quelques mots",
  "publickeyinfo":"<p>Cette clé est générée par votre navigateur, gardez précieusement votre clé privée que vous seul connaissez. En cas de perte de cette clé tous vos actifs seront perdus.</p><p>Cette méthode nous permet de vous garantir un contrôle total décentralisé.</p>",
  "imgavatardesc":"Changer votre avatar",
  "imgavatarinfo":"Pour un meilleur rendu, une image carrée de 128px au format jpg",
  "emaildesc":"Email",
  "telephonedesc":"Tel",
  "familyNamedesc":"Nom",
  "givenNamedesc":"Prénom",
  "additionalNamedesc":"Pseudo",
  "additionalNamesinfo":"<p>Nom avec lequel vous souhaitez qu'on vous reconnaisse sur l'instance d'apxtrib</p><p>Attention ce nom n'est unique que sur une instance d'apxtrib. Un même pseudo peut être utilisé sur un autre serveur ; pour garantir l'identité on vérifie pseudo@ domaine de rattachement.</p>",
  "dtcreatedesc":"Date de création",
  "dtupdatedesc":"Dernière mise à jour",
  "dtlastlogindesc":"Dernier accès au login",
  "accessrightsdesc":"Vos droits d'accès"
}
"aliasexist": "Cet alias {{data.alias}} existe",
|
||||
"emailerr": "Verifier votre email",
|
||||
"aliasorprivkeytooshort": "Vérifiez votre alias et votre clé privée",
|
||||
"aliasdoesnotexist": "Cet alias {{data.alias}} n'existe pas",
|
||||
"personexist": "Cette personne {{data.alias}} existe pour {{data.tribeid}}",
|
||||
"persondoesnotexist": "Cette personne {{data.alias}} n'existe pas pour {{data.tribeid}}",
|
||||
"successfullcreate": "La création de cette identité {{data.alias}} a été un succès. {{#data.withemail}} Un email a été envoyé à {{data.email}}, si vous ne le recevez pas, veuillez télécharger vos clés avant de quitter cette page.{{/data.withemail}}",
|
||||
"successfulcreatewithoutemail": "La creation de data.alias}} a été un succès. Aucun email ,'a été envoyé, verifier bien que vos clés sont bien sauvegardé de votre coté",
|
||||
"successfulluppdate": "Votre alias en tant que Personne est maintenant mis à jour dans {{data.tribeid}}",
|
||||
"errcreate": "Desolé, un probléme inconnu empeche la creation",
|
||||
"logout": "Votre token a été supprimé du server"
|
||||
}
|
||||
|
3
api/models/lg/Tribes_fr.json
Normal file
@@ -0,0 +1,3 @@
{
  "actionmissing":"L'action {{data.action}} n'existe pas pour la tribu {{data.tribe}}."
}
8
api/models/lg/middlewares_en.json
Normal file
@@ -0,0 +1,8 @@
{
  "errrequest": "Backend seems unavailable",
  "missingheader": "Some headers are missing for a valid request: {{#data}} {{.}} {{/data}}",
  "tribeiddoesnotexist": "Header xtribe: {{data.xtribe}} does not exist in this town, you cannot access it",
  "authenticated": "Your alias {{{data.xalias}}} is authenticated",
  "notauthenticated": "Your alias: {{data.xalias}} is not authenticated {{^data.aliasexists}} and this alias does not exist!{{/data.aliasexists}}",
  "forbiddenAccessright": "Alias {{data.xalias}} has no access right to perform {{data.action}} on object {{data.object}} for tribe {{mor.xworkon}}"
}
10
api/models/lg/middlewares_fr.json
Normal file
@@ -0,0 +1,10 @@
{
  "errrequest": "Le serveur ne semble pas répondre",
  "unconsistentpgp": "Vos clés ne sont pas conformes {{err}}",
  "missingheader": "Certains en-têtes manquent pour avoir une requête valide : {{#data}} {{.}} {{/data}}",
  "tribeiddoesnotexist": "L'en-tête xtribe : {{data.xtribe}} n'existe pas dans cette ville, vous ne pouvez pas y accéder",
  "authenticated": "Votre alias {{{data.xalias}}} est authentifié",
  "notauthenticated": "Votre alias : {{data.xalias}} n'est pas authentifié {{^data.aliasexists}} et cet alias n'existe pas !{{/data.aliasexists}}",
  "forbiddenAccessright": "L'alias {{data.xalias}} n'a pas le droit d'agir {{data.action}} sur l'objet {{data.object}} pour la tribu {{mor.xworkon}}",
  "signaturefailled": "Désolé, votre signature n'est pas valide pour cet alias."
}
@@ -3,6 +3,7 @@ Unit testing
*/
const assert = require("assert");
const Checkjson = require("../Checkjson.js");
const conf = require(`${process.env.dirtown}/conf.json`);

const ut = { name: "Checkjson" };

@@ -17,115 +18,120 @@ const schema = {
  },
};
const testproperties = [
  {
    name: "test0",
    data: { totest: true },
    properties: { totest: { type: "boolean" } },
    status: 200
  },
  {
    name: "test0",
    data: { totest: true },
    properties: { totest: { type: "boolean" } },
    status: 200,
  },
  {
    name: "test1",
    data: { totest: "blabla" },
    properties: { totest: { type: "string" } },
    status: 200
    status: 200,
  },
  {
    name: "test2",
    data: { totest: 123 },
    properties: { totest: { type: "string" } },
    status: 417
    status: 417,
  },
  {
    name: "test3",
    data: { totest: 123.13 },
    properties: { totest: { type: "integer" } },
    status: 417
    status: 417,
  },
  {
    name: "test4",
    data: { totest: 123 },
    properties: { totest: { type: "number" } },
    status: 200
    status: 200,
  },
  {
    name: "test5",
    data: { totest: 12312 },
    properties: { totest: { type: "number" } },
    status: 200
    status: 200,
  },
  {
    name: "test6",
    data: { totest: 12.313 },
    properties: { totest: { type: "float" } },
    status: 200
    status: 200,
  },
  {
    name: "test7",
    data: { totest: "blablab sfde" },
    data: { totest: "blablab sfde" },
    properties: { totest: { type: "string", minLength: 1111 } },
    status: 417
    status: 417,
  },
  {
    name: "test8",
    data: { totest: "blablab sfde" },
    properties: { totest: { type: "string", minLength: 4, maxLength: 128} },
    status: 200
    data: { totest: "blablab sfde" },
    properties: { totest: { type: "string", minLength: 4, maxLength: 128 } },
    status: 200,
  },
  {
    name: "test9",
    data: { totest: 12 },
    properties: { totest: { type: "integer", multipleOf:3} },
    status: 200
    data: { totest: 12 },
    properties: { totest: { type: "integer", multipleOf: 3 } },
    status: 200,
  },
  {
    name: "test10",
    data: { totest: 9 },
    properties: { totest: { type: "number", minimum:-10, exclusiveMaximum:10} },
    status: 200
    data: { totest: 9 },
    properties: {
      totest: { type: "number", minimum: -10, exclusiveMaximum: 10 },
    },
    status: 200,
  },
  {
    name: "test11",
    data: { totest: 10 },
    properties: { totest: { type: "number", minimum:-10, exclusiveMaximum:10} },
    status: 417
    data: { totest: 10 },
    properties: {
      totest: { type: "number", minimum: -10, exclusiveMaximum: 10 },
    },
    status: 417,
  },
  {
    name: "test12",
    data: { totest: "gfhrtabcdgfr" },
    properties: { totest: { type: "string", pattern:/.*abc.*/} },
    status: 200
    data: { totest: "gfhrtabcdgfr" },
    properties: { totest: { type: "string", pattern: /.*abc.*/ } },
    status: 200,
  },
  {
    name: "test13",
    data: { totest: "toto@google.com" },
    properties: { totest: { type: "string", format:"email"} },
    status: 200
    data: { totest: "toto@google.com" },
    properties: { totest: { type: "string", format: "email" } },
    status: 200,
  },
  {
    name: "test14",
    data: { totest: "Aze123@0" },
    properties: { totest: { type: "string", format:"password"} },
    status: 200
    data: { totest: "Aze123@0" },
    properties: { totest: { type: "string", format: "password" } },
    status: 200,
  },
  {
    name: "test15",
    data: { totest: "value1" },
    properties: { totest: { type: "string", enum:["value1","value2","value3"]} },
    status: 200
    data: { totest: "value1" },
    properties: {
      totest: { type: "string", enum: ["value1", "value2", "value3"] },
    },
    status: 200,
  },
  {
    name: "test16",
    data: { totest: ["t1","t2"] },
    properties: { totest: { type: ["string", "number"] }},
    status: 417
  }
  ,
    data: { totest: ["t1", "t2"] },
    properties: { totest: { type: ["string", "number"] } },
    status: 417,
  },
  {
    name: "test17",
    data: { totest: 12 },
    properties: { totest: { type: ["string", "number"] }},
    status: 200
  }
    properties: { totest: { type: ["string", "number"] } },
    status: 200,
  },
];

ut.testproperties = (options) => {
@@ -134,10 +140,10 @@ ut.testproperties = (options) => {
    schema.properties = t.properties;
    const res = Checkjson.schema.data(schema, t.data);
    if (res.status != t.status) {
      msg = (msg == "") ? "Unconsistent testproperties() name list: " : `${msg},`;
      msg = msg == "" ? "Unconsistent testproperties() name list: " : `${msg},`;
      if (options.verbose) {
        console.log(t)
        console.log(res);
        console.log(t);
        console.log(res);
      }
      msg += res.err.map((e) => ` ${t.name} ${e.info}`);
    }
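A sketch of invoking this unit test (the runner wiring is an assumption):

// Hypothetical run: checks every testN case against its expected status.
const ut = require("./Checkjson.js"); // this unittest module
ut.testproperties({ verbose: true }); // logs each failing case and its result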
@@ -2,71 +2,42 @@
Unit testing
*/
const assert = require("assert");
const fs=require('fs-extra');
const path= require('path');
const fs = require("fs-extra");
const path = require("path");
const Odmdb = require("../Odmdb.js");
const {generemdp} = require('../../nationchains/socialworld/contracts/toolsbox.js');
const { generemdp } = require("../toolsbox.js");
const conf = require(`${process.env.dirtown}/conf.json`);

const ut = { name: "Odmdb" };
/*
We test only search and indexation here.
Create, Update, Read and Delete are unit tested with the specificities of each Object.

To do that we create in tmp a dummy data folder for a dummy schema object
and test the crud process for any object.
*/
const schema = {
  $schema: "http://json-schema.org/schema#",
  title: "Dummy schema to test Checkjson.js",
  description: "Checkjson is used on the server as well as in a browser",
  $comment: "We change schema type on the fly to simplify the test",
  type: "object",
  properties: {
    uuid: {
      type: "string",
      format: "uuid",
      default: "=uuid.v4()"
    },
    dtcreate: {
      type: "string",
      format: "datetime",
      default: "=date.now()"
    },
    tag: {
      type: "string",
      enum: ["t1", "t2", "t3"],
      default: "t1"
    },
    info: {
      type: "string",
      minLength: 10,
      default: "=generemdp(255,'ABCDEFGHIJKLM 12340')"
    }
  },
  required: ["uuid"],
  apxprimarykey: "uuid",
  apxuniquekey: ["info"],
  apxsearchindex: {
    "uuid": { "list": [], "taginfo": ['tag', 'info'], "all": "" },
    "info": { "uuid": ['uuid'] }
  }
};
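Given apxsearchindex above, Odmdb is expected to maintain per-key index files; a sketch of one plausible entry (file naming and shape are assumptions inferred from the schema, not verified against Odmdb.js):

// e.g. idx/uuid_taginfo.json -- for each uuid, the [tag, info] values of the item:
// { "7d8291c0-e137-11e8-9f7b-1dc8e57bed33": ["t1", "Lorem ipsum A"] }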
ut.crud = (objectPathname, itm, profils) => {
  //
  // test if exist
  // if not test create
  // test to read
  // test update
  // test delete
  const res = { status: 200, err: [] };
  return res;
};

const obj = { tag: "t1", info: "Lorem ipsum A" };
const testvar = { alias: "tutu", passphrase: "", privatekey: "", publickey: "" };

ut.createanobject = (schema, obj) => {
  const testitms = [
    {
      objectPathname: `${conf.dirapi}/nationchains/pagans`,
      itm: { alias: "toutou", publickey: testvar.publickey }, // placeholder key from testvar above
    },
  ];

  const res = { status: 200, err: [] };
  return res;
};

ut.run = (options) => {
  const objectPath = path.resolve(__dirname, "../../tmp/testobjects");
  const schemaPath = path.resolve(__dirname, "../../tmp/testschema");
  if (!fs.existsSync(objectPath)) fs.ensureDirSync(objectPath);
  if (!fs.existsSync(schemaPath)) fs.ensureDirSync(schemaPath);
  const createenvobj = Odmdb.setObject(schemaPath, objectPath, "objtest", schema, {}, "en");
  assert.deepEqual(createenvobj, { status: 200 }, JSON.stringify(createenvobj));
  const checkschema = Odmdb.schema(schemaPath, "objtest", true);
  assert.deepEqual(checkschema.status, 200, JSON.stringify(checkschema));
};
module.exports = ut;

ut.run = (options) => {
  let msg = "";
  testitms.forEach((i) => {
    ut.crud(i);
    // on error, append to msg
  });
  assert.deepEqual(msg, "", msg);
};
module.exports = ut;