full code

2023-12-07 12:04:19 +01:00
parent 484d2ad22a
commit 7656f57a11
45 changed files with 9659 additions and 0 deletions

models/Checkjson.js Executable file (364 lines added)

@@ -0,0 +1,364 @@
/*
This module has to be usable on the back end as well as the front end.
It can be included in a project with:
- into a browser : <script src="https://townName.nationName.dns/nationchains/contracts/Checkjson.js"></script>
- into a node.js : const Checkjson = require( `../nationchains/socialworld/contracts/Checkjson.js`);
*/
// --##
const Checkjson = {};
Checkjson.schema = {};
Checkjson.schema.properties = {};
Checkjson.schema.properties.type = {};
Checkjson.schema.properties.type.string = (str) => typeof str === "string";
Checkjson.schema.properties.type.array = (val) => Array.isArray(val);
Checkjson.schema.properties.type.object = (val) => typeof val === 'object' && val !== null && !Array.isArray(val);
Checkjson.schema.properties.type.number = (n) => typeof n === "number";
Checkjson.schema.properties.type.boolean = (n) => typeof n === "boolean";
Checkjson.schema.properties.type.integer = (n) =>
n != "" && !isNaN(n) && Math.round(n) == n;
Checkjson.schema.properties.type.float = (n) =>
n != "" && !isNaN(n) && Math.round(n) != n; //not yet in json schema
Checkjson.schema.properties.minLength = (str, min) =>
  typeof str === "string" && str.length >= parseInt(min);
Checkjson.schema.properties.maxLength = (str, max) =>
  typeof str === "string" && str.length <= parseInt(max);
Checkjson.schema.properties.multipleOf = (n, val) =>
typeof n === "number" &&
typeof val === "number" &&
parseFloat(n) / parseFloat(val) -
Math.round(parseFloat(n) / parseFloat(val)) <
0.0000001;
Checkjson.schema.properties.range = (
n,
minimum,
exclusiveMinimum,
maximum,
exclusiveMaximum
) => {
//console.log(minimum,exclusiveMinimum,maximum, exclusiveMaximum,n)
if (typeof n !== "number") return false;
  if (minimum !== undefined && parseFloat(n) < parseFloat(minimum)) return false;
  if (exclusiveMinimum !== undefined && parseFloat(n) <= parseFloat(exclusiveMinimum))
    return false;
  if (maximum !== undefined && parseFloat(n) > parseFloat(maximum)) return false;
  if (exclusiveMaximum !== undefined && parseFloat(n) >= parseFloat(exclusiveMaximum))
    return false;
return true;
};
Checkjson.schema.properties.pattern = (str, pattern) => {
try {
pattern = new RegExp(pattern);
} catch (e) {
console.log("err pattern in checkjon", pattern);
return false;
}
return pattern.test(str);
};
Checkjson.schema.properties.enum = (str, enumvalues) => {
  if (Array.isArray(enumvalues)) {
    return typeof str === "string" && enumvalues.includes(str);
  } else if (typeof enumvalues === "string" && enumvalues.includes(".")) {
    //enumvalues is a reference tribeId.objectname.key (fs is only available server side)
    const [tribeId, obj, keyid] = enumvalues.split(".");
    return fs.existsSync(
      `../../nationchains/tribes/${tribeId}/schema/${obj}/itm/${keyid}.json`
    );
  } else {
    return true;
  }
};
// to check a value for a pattern
// Checkjson.schema.properties.pattern(value, properties[p].pattern)
/**
 *
 * @param {string} str to test
 * @param {string} format keyword existing in Checkjson.schema.properties.format
 * @return null if the format does not exist, otherwise true or false
 */
Checkjson.testformat=(str, format)=>{
if (!Checkjson.schema.properties.format[format]) { return null}
return Checkjson.schema.properties.pattern(str, Checkjson.schema.properties.format[format])
}
// see format https://json-schema.org/understanding-json-schema/reference/string.html#format
// to check just a value against a format use Checkjson.testformat(value, format)
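// Example (illustrative):
// Checkjson.testformat("john@doe.com", "email"); // => true
// Checkjson.testformat("not an email", "email"); // => false
// Checkjson.testformat("12:00", "unknownformat"); // => null (format keyword does not exist)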
Checkjson.schema.properties.format = {
"date-time": /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/,
stringalphaonly: /^[A-Za-z0-9]{3,}$/,
time: /[0-2]\d:[0-5]\d:[0-5]\d\.\d{1,3}/,
date: /\d{4}-[01]\d-[0-3]\d/,
duration: / /,
email:
/^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/,
"idn-email": / /,
uuid: /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/,
uri: / /,
"uri-reference": / /,
iri: / /,
hostname: / /,
"idn-hostname": / /,
  ipv4: /^([0-9]{1,3}\.){3}[0-9]{1,3}$/,
  ipv6: /^((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b))|(([0-9A-Fa-f]{1,4}:){0,5}:((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b))|(::([0-9A-Fa-f]{1,4}:){0,5}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:))$/,
telephonefr: /^0[1-9][0-9]{9}$/,
telephoneinter: /^\+*(\d{3})*[0-9,\-]{8,}/,
password:
/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/,
postalcodefr: /(^\d{5}$)|(^\d{5}-\d{4}$)/,
pgppublickey:
/^-----BEGIN PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PUBLIC KEY BLOCK-----(\n|\r|\r\n)?$/gm,
pgpprivatekey:
/^-----BEGIN PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)(\n|\r|\r\n)([0-9a-zA-Z\+\/=]*(\n|\r|\r\n))*-----END PGP PRIVATE KEY BLOCK-----(\n|\r|\r\n)?$/gm,
};
Checkjson.schema.properties.default;
Checkjson.schema.validation = (schema) => {
/*validate a schema structure*/
  let multimsg = [];
const res = {};
if (schema.properties) {
Object.keys(schema.properties).forEach((p) => {
const properties = schema.properties;
if (
properties[p].type &&
typeof properties[p].type === "string" &&
!Checkjson.schema.properties.type[properties[p].type]
) {
multimsg.push({
ref: "Checkjson",
msg: "schemaerrtypedoesnotexist",
data: { propertie: p, type: properties[p].type },
});
}
if (
properties[p].type &&
typeof properties[p].type === "object"){
if (properties[p]['$ref']){
        //This is managed by Odmdb.schema which loads complex schemas recursively
multimsg.push({
ref: "Checkjson",
msg: "externalrefnotload",
data: { propertie: p, ref: properties[p]["$ref"]},
});
}
//case type=="object" with properties
if (properties[p].properties){
const checksub = Checkjson.schema.validation(properties[p])
if (checksub.status!=200){
multimsg = multimsg.concat(checksub.multimsg)
}
}
// if not $ref or no properties then any object is accepted
}
if (
properties[p].format &&
!Checkjson.schema.properties.format[properties[p].format]
) {
multimsg.push({
ref: "Checkjson",
msg: "schemaerrformatdoesnotexist",
data: { propertie: p, format: properties[p].format },
});
}
if (properties[p].enum && !Array.isArray(properties[p].enum)) {
multimsg.push({
ref: "Checkjson",
msg: "schemaerrenumnotarray",
data: { propertie: p, enum: properties[p].enum },
});
}
});
}
// 406 means not acceptable
if (multimsg.length > 0) {
res.status = 406;
res.multimsg = multimsg;
} else {
res.status = 200;
res.ref = "Checkjson";
res.msg = "validcheck";
}
return res;
};
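// Example (illustrative) of a schema structure check:
// const schema = { properties: { alias: { type: "string", minLength: 4 } } };
// Checkjson.schema.validation(schema); // => { status: 200, ref: "Checkjson", msg: "validcheck" }
// With an unknown type such as { alias: { type: "text" } } it returns instead
// { status: 406, multimsg: [{ ref: "Checkjson", msg: "schemaerrtypedoesnotexist", ... }] }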
/**
* Check data with a schema
*
* @param {object} schema a json schema
* @param {*} data some data to check using schema
 * @param {*} withschemacheck boolean that forces a schema check (useful when modifying a schema)
 * @returns {status: 200, ref:"Checkjson", msg:"validcheck", data:{itm:object}}
 *          {status: 417, multimsg:[{ref,msg,data}], data:{itm:object}}
*/
Checkjson.schema.data = (schema, data, withschemacheck) => {
/* validate a data set with a schema in a context ctx */
/*
console.log('#################')
console.log(schema);
console.log('---------')
console.log(data)
*/
const propertiescheck=(properties,subdata)=>{
// properties ={prop1:{type,format},prop2:{type:object,...}}
// subdata={prop1,prop2}
// Return [] => no error, else 1 item per error {msg,ref:checkjson,data}
let multimsg=[]
Object.keys(properties).forEach((p) => {
      //type is mandatory in a property
if (subdata[p]) {
if (properties[p].properties){
//means it is a subobject
multimsg=multimsg.concat(propertiescheck(properties[p].properties,subdata[p]))
}
        //type can be a list of: string, number, array, boolean, object, null
        const typlist =
          properties[p].type && typeof properties[p].type === "string"
            ? [properties[p].type]
            : properties[p].type || [];
        let valid = typlist.length === 0; // no declared type means any type is accepted
        typlist.forEach((typ) => {
          // at least one test has to be valid
          if (
            Checkjson.schema.properties.type[typ] &&
            Checkjson.schema.properties.type[typ](subdata[p])
          )
            valid = true;
        });
if (!valid)
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: { key: p, value: subdata[p] },
});
if (
properties[p].minLength &&
!Checkjson.schema.properties.minLength(
subdata[p],
properties[p].minLength
)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: {
key: p,
value: subdata[p],
minLength: properties[p].minLength,
},
});
}
if (
properties[p].maxLength &&
!Checkjson.schema.properties.maxLength(
subdata[p],
properties[p].maxLength
)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: {
key: p,
value: subdata[p],
maxLength: properties[p].maxLength,
},
});
}
if (
properties[p].multipleOf &&
!Checkjson.schema.properties.multipleOf(
subdata[p],
properties[p].multipleOf
)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: {
key: p,
value: subdata[p],
multipleOf: properties[p].multipleOf,
},
});
}
if (
properties[p].minimum ||
properties[p].maximum ||
properties[p].exclusiveMinimum ||
properties[p].exclusiveMaximum
) {
// test range
if (
!Checkjson.schema.properties.range(
subdata[p],
properties[p].minimum,
properties[p].exclusiveMinimum,
properties[p].maximum,
properties[p].exclusiveMaximum
)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: {
key: p,
value: subdata[p],
minimum: properties[p].minimum,
maximum: properties[p].maximum,
exclusiveMinimum: properties[p].exclusiveMinimum,
exclusiveMaximum: properties[p].exclusiveMaximum,
},
});
}
}
if (
properties[p].enum &&
!Checkjson.schema.properties.enum(subdata[p], properties[p].enum)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: { key: p, value: subdata[p], enumlst: properties[p].enum },
});
}
if (properties[p].format) {
properties[p].pattern =
Checkjson.schema.properties.format[properties[p].format];
}
if (
properties[p].pattern &&
!Checkjson.schema.properties.pattern(subdata[p], properties[p].pattern)
) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertie",
data: { key: p, value: subdata[p], pattern: properties[p].pattern },
});
}
} else if (schema.required && schema.required.includes(p)) {
multimsg.push({
ref: "Checkjson",
msg: "dataerrpropertierequired",
data: { key: p, required: true },
});
}
});
return multimsg
};//end propertiescheck()
if (withschemacheck) {
const validschema = Checkjson.schema.validation(schema);
if (validschema.status != 200) return validschema;
}
let multi=propertiescheck(schema.properties,data)
const res = {};
if (multi.length > 0) {
res.status = 417;
res.multimsg = multi;
} else {
res.status = 200;
res.ref = "Checkjson";
res.msg = "validcheck";
}
if (schema.apxid) {
res.data={apxid : data[schema.apxid],itm:data};
}
return res;
};
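// Example (illustrative) of a data check against a schema with an apxid primary key:
// const schema = {
//   apxid: "alias",
//   required: ["alias"],
//   properties: { alias: { type: "string", minLength: 4 } },
// };
// Checkjson.schema.data(schema, { alias: "toto" });
// // => { status: 200, ref: "Checkjson", msg: "validcheck", data: { apxid: "toto", itm: { alias: "toto" } } }
// Checkjson.schema.data(schema, {});
// // => { status: 417, multimsg: [{ ref: "Checkjson", msg: "dataerrpropertierequired", ... }] }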
if (typeof module !== "undefined") module.exports = Checkjson;

models/Contracts.js Executable file (113 lines added)

@@ -0,0 +1,113 @@
const fs = require( 'fs-extra' );
const glob = require( 'glob' );
const moment = require( 'moment' );
const axios = require( 'axios' );
const conf=require(`../conf.json`)
/*
Model that processes the action plans of each client, like sending email campaigns
or anything else planned in /tribes/tribeid/actions/todo
*/
const Cards = {}; //require('../../models/Cards');
const Contracts = {};
/*
If envoicampain, send the list of emails in param.msg.destperso with param.headers;
if not envoicampain, it just returns a test of what would be sent
@param = {headers, msg:{destperso}}
*/
Contracts.sendcampain = async ( param, envoicampain ) => {
if( envoicampain ) {
    // Careful: the POST to outputs/msg only waits for the feedback of the 1st message
const retcampain = await axios.post( 'https://mail.maildigit.fr/outputs/msg', param.msg, {
headers: param.headers
} );
if( retcampain.status !== 200 ) {
console.log( "err", retcampain.payload.moreinfo );
fs.appendFileSync( `${conf.tribes}/log_erreurglobal.txt`, moment( new Date() )
.format( 'YYYYMMDD HH:mm:ss' ) + ' - IMPOSSIBLE TO SEND CAMPAIN TODO for :' + param.tribeid + ' -- ' + retcampain.payload.moreinfo + '\n', 'utf-8' );
};
return retcampain;
} else {
    // allows testing what would be sent
    let premieremail = "";
    for( let i = 0; i < Math.min( param.msg.destperso.length, 5 ); i++ ) {
      premieremail += param.msg.destperso[ i ].email + ",";
    }
return {
status: 201,
payload: {
info: [ 'simplecomptage' ],
model: 'Contracts',
moreinfo: "#email: " + param.msg.destperso.length + " - 5 1st emails: " + premieremail
}
};
}
}
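// Example (illustrative) dry run, without actually sending:
// Contracts.sendcampain(
//   { headers: { xtribe: "ndda" }, msg: { destperso: [{ email: "a@b.c" }, { email: "d@e.f" }] } },
//   false
// ).then((res) => console.log(res.payload.moreinfo)); // "#email: 2 - 5 1st emails: a@b.c,d@e.f,"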
Contracts.initActiontodo = async ( envoie ) => {
const datedeb = moment( new Date() )
.format( 'YYYYMMDD HH:mm:ss' );
let todo, actiondone;
let log = {
nbaction: 0,
nbactionexec: 0,
nbactionerr: 0,
actionlist: ""
};
const listclient = fs.readJsonSync( `${conf.tribes}/tribeids.json` );
for( let clid in listclient ) {
console.log( listclient[ clid ] );
let listaction = glob.sync( `${conf.tribes}/${listclient[clid]}/actions/todo/*.json` );
for( let action in listaction ) {
console.log( listaction[ action ] )
log.nbaction++;
todo = fs.readJsonSync( listaction[ action ] );
let passdate = true;
      // currentdate has to be after startDate if it exists and before validuntilDate if it exists
      // console.log('test now is before the start date ', moment() < moment(todo.startDate, 'YYYYMMDD HH:mm:ss').toDate());
if( todo.startDate && ( moment() < moment( todo.startDate, 'YYYYMMDD HH:mm:ss' )
.toDate() ) ) {
passdate = false;
};
      // currentdate must not exceed the task's validity date
      // console.log('test now is after the validity date ', moment() > moment(todo.validuntilDate, 'YYYYMMDD HH:mm:ss').toDate());
      if( todo.validuntilDate && ( moment() > moment( todo.validuntilDate, 'YYYYMMDD HH:mm:ss' )
.toDate() ) ) {
passdate = false;
};
// currentdate
if( passdate && todo.action && todo.error == "" ) {
log.nbactionexec++;
const actiondone = await Contracts[ todo.action ]( todo, envoie );
todo.datesRun.push( moment( new Date() )
.format( 'YYYYMMDD HH:mm:ss' ) );
//console.log("actiondone"
log.actionlist += "STATUS:" + actiondone.status + " -- " + listaction[ action ] + "\n";
if( actiondone.status == 200 ) {
todo.error = "";
} else {
log.nbactionerr++;
todo.error += "status : " + actiondone.status + ' ' + actiondone.payload.moreinfo;
};
if( parseInt( todo.maxnumberoftime ) && todo.maxnumberoftime != "999" && ( todo.datesRun.length >= parseInt( todo.maxnumberoftime ) ) ) {
//archive en done this triggeraction
fs.outputJsonSync( listaction[ action ].replace( '/todo/', '/done/' ), todo, {
spaces: 2
} );
fs.unlinkSync( listaction[ action ] );
} else {
fs.outputJsonSync( listaction[ action ], todo, {
spaces: 2
} );
}
} else {
log.actionlist += "STATUS : not executed " + listaction[ action ] + "\n";
};
};
};
const trace = "###################### LOGS ####################\nSTART:" + datedeb + " END:" + moment( new Date() )
.format( 'YYYYMMDD HH:mm:ss' ) + "\n nombre d'actions analysées : " + log.nbaction + " dont executées : " + log.nbactionexec + " dont en erreur: " + log.nbactionerr + "\n" + log.actionlist;
fs.appendFileSync( `${conf.tribes}/log.txt`, trace, 'utf-8' );
return "done";
}
module.exports = Contracts;

models/Nations.js Executable file (260 lines added)

@@ -0,0 +1,260 @@
const bcrypt = require("bcrypt");
const fs = require("fs-extra");
const glob = require("glob");
const jwt = require("jwt-simple");
const axios = require("axios");
const path = require("path");
const conf = require(`../../conf/townconf.json`);
const Odmdb = require("./Odmdb.js");
// lowercase 1st letter is normal
const towns = require("./Towns.js");
const pagans = require("./Pagans.js");
/*
Blockchain manager
* Manage the network directory of nations and towns
* read the blockchain and search
* submit a transaction (now) or a contract (future) to store a number of AXESS from userA.pubkey to userB.pubkey
* mine to be able to register a block and create AXESS
* manage APIXP rules 20 M APIXP 1AXESS = 1 block validation
* manage contract = action if something happened, validated by a proof of work
*/
const Nations = {};
Nations.init = () => {
console.group("init Nations");
};
Nations.chaintown = (nationId, townId) => {
/**
   * If it does not already exist, add a requested town into conf.towns, e.g. conf.towns.push({ "townId": "wall", "nationId": "ants", "dns": "wall-ants.ndda.fr" })
*/
};
Nations.updateobjectsfromfreshesttown = (dnstownlist, objectidx) => {
/**
   * Get the last update time per apxtri object, then choose the latest source and update the local town
   * if an item exists locally and not in the town requested
   * @Param {array} dnstownlist list of dns to get the latest data from
   * @Param {object} objectidx objectname:idxfile {pagans:"alias_all.json",...}
* @return create/update nationchains/pagans town nation
*/
const localversion = {};
const objlist = Object.keys(objectidx);
objlist.forEach((o) => {
let objconf = {
name: o,
      schema: `../nationchains/tribes/adminapi/schema/${o}.json`,
lastupdate: -1,
};
if (fs.existsSync(`../nationchains/${o}/conf.json`)) {
objconf = fs.readJsonSync(`../nationchains/${o}/conf.json`);
} else {
fs.outputJsonSync(`../nationchains/${o}/conf.json`, objconf);
}
localversion[o] = [conf.dns[0], objconf.lastupdate];
});
//console.log(localversion);
for (let t = 0; t < dnstownlist.length; t++) {
if (conf.townId != dnstownlist[t].townId) { // to avoid update itself
let promiseconf = [];
let objecttotest = [];
objlist.forEach((o) => {
//console.log(`https://${dnstownlist[t].dns}/nationchains/${o}/conf.json`);
objecttotest.push(o);
promiseconf.push(
axios.get(`https://${dnstownlist[t].dns}/nationchains/${o}/conf.json`)
);
});
Promise.all(promiseconf)
.then((reps) => {
let promiseidx = [];
let objecttoupdate = [];
let objlastupdate = [];
for (let i = 0; i < objecttotest.length; i++) {
if (
parseInt(reps[i].data.lastupdate) >
parseInt(localversion[reps[i].data.name][1])
) {
// add promise to get data
/*console.log(
`https://${dnstownlist[t].dns}/nationchains/${
reps[i].data.name
}/idx/${objectidx[reps[i].data.name]}`
);*/
objecttoupdate.push(objecttotest[i]);
objlastupdate.push(reps[i].data.lastupdate);
promiseidx.push(
axios.get(
`https://${dnstownlist[t].dns}/nationchains/${
reps[i].data.name
}/idx/${objectidx[reps[i].data.name]}`
)
);
}
}
Promise.all(promiseidx)
.then((rets) => {
for (let j = 0; j < objecttoupdate.length; j++) {
Odmdb.updatefromidxall(
objecttoupdate[j],
objectidx[objecttoupdate[j]],
rets[j].data,
objlastupdate[j]
);
}
})
.catch((err) => {
console.log("ERR get idx data");
console.log(err);
});
})
.catch((err) => {
console.log("ERR get conf lastupdate");
console.log(err);
});
}
}
return {status:200,ref:"Nations",msg:"updated",data:{}};
};
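// Example (illustrative) refresh of the local pagans object from two peer towns:
// Nations.updateobjectsfromfreshesttown(
//   [
//     { townId: "wall", nationId: "ants", dns: "wall-ants.ndda.fr" },
//     { townId: "hill", nationId: "ants", dns: "hill-ants.ndda.fr" },
//   ],
//   { pagans: "alias_all.json" }
// ); // each object is updated only if the remote conf.json lastupdate is fresher than the local one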
Nations.synchronizeold = () => {
/*
Run process to communicate with a list of towns to update network and transaction
*/
//update himself then send to other information
if (process.env.NODE_ENV != "prod") {
// Not concerned
return {};
}
const initcurrentinstance = {
fixedIP: "",
lastblocknumber: 0,
firsttimeupdate: 0,
lastimeupdate: 0,
positifupdate: 0,
negatifupdate: 0,
pubkeyadmin: "",
tribeids: [],
logins: [],
knowninstance: [],
};
let currentinstance = initcurrentinstance;
try {
currentinstance = fs.readFileSync(
`${conf.tribes}/${conf.mayorId}/nationchains/nodes/${conf.rootURL}`,
"utf-8"
);
} catch (err) {
console.log("first init");
}
const loginsglob = fs.readJsonSync(`${conf.tmp}/loginsglob.json`, "utf-8");
currentinstance.logins = Object.keys(loginsglob);
currentinstance.tribeids = [...new Set(Object.values(loginsglob))];
  currentinstance.instanceknown = glob.sync(
`${conf.tribes}/${conf.mayorId}/nationchains/nodes/*`
);
//Save it
fs.outputJsonSync(
`${conf.tribes}/${conf.mayorId}/nationchains/nodes/${conf.rootURL}`,
currentinstance
);
// proof of work
// try to find a key based on last block with difficulty
// if find then send to all for update and try to get token
// in any case rerun Nations.synchronize()
currentinstance.instanceknown.forEach((u) => {
if (u != conf.rootURL) {
//send currentinstance info and get back state of
axios
.post(`https://${u}/nationchains/push`, currentinstance)
.then((rep) => {
          const newdata = rep.data.moreinfo;
//Available update info
fs.readJson(
`${conf.tribes}/${conf.mayorId}/nationchains/nodes/${u}`,
(err, data) => {
if (err) {
data.negatifupdate += 1;
data.lasttimeupdate = Date.now();
} else {
data.positifupdate += 1;
data.lastimeupdate = Date.now();
data.tribeids = newdata.tribeids;
data.logins = newdata.logins;
data.lastblocknumber = newdata.lastblocknumber;
newdata.knowninstance.forEach((k) => {
if (!data.knowninstance.includes(k)) {
data.knowninstance.push(k);
//init the domain for next update
initcurrentinstance.firsttimeupdate = Date.now();
fs.outputJson(
`${conf.tribes}/${conf.mayorId}/nationchains/nodes/${k}`,
initcurrentinstance,
"utf-8"
);
}
});
}
//save with info
fs.outputJson(
`${conf.tribes}/${conf.mayorId}/nationchains/nodes/${u}`,
data
);
}
);
})
.catch((err) => {
//Not available
data.negatifupdate += 1;
data.lasttimeupdate = Date.now();
fs.outputJson(
`${conf.tribes}/${conf.mayorId}/nationchains/nodes/${u}`,
data
);
});
}
});
};
Nations.create = (conf) => {
/*
@conf from a nationchains/socialworld/setup/townSetup {object, nationId, townId, dns}
@return
*/
const res = {};
if (conf.object == "towns") {
Odmdb.create("nationchains/socialworld/objects", "towns", conf);
}
const nations = fs.readJsonSync(
"./nationchains/nations/idx/nationId_all.json"
);
  if (!Object.keys(nations).includes(conf.nationId)) {
res.status = 404;
res.info = `your nationId ${conf.nationId} does not exist you have to choose an existing one`;
return res;
}
const towns = fs.readJsonSync("./nationchains/towns/idx/townId_all.json");
if (towns[conf.nationId].includes(conf.townId)) {
res.status = 409;
res.info = `This conf.townId already exist you have to find a unique town name`;
return res;
}
const towndata = {
uuid: conf.townId,
nationid: conf.nationId,
url: `${conf.townId}.${conf.nationId}.${conf.dns}`,
status: conf.dns == "unchain" ? "unchain" : "tochain",
};
const metatown = fs.readJsonSync(
"./nationchains/socialworld/metaobject/towns.json"
);
Odmdb.add(objectpath, towns, metatown, towndata);
  fs.outputJsonSync(
    `./nationchains/socialworld/objects/towns/${conf.townId}.json`,
    towndata
  );
  res.status = 200;
  res.info = `${conf.townId} created for nation ${conf.nationId}`;
return res;
};
module.exports = Nations;

models/Notifications.js Normal file (223 lines added)

@@ -0,0 +1,223 @@
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const axios = require("axios");
//const smtp = require("smtp-client");
const nodemailer = require("nodemailer");
const conf = require(`../../conf/townconf.json`);
/**
 * To manage any communication between Pagans
 * mayor druid emailing/sms/paper from a tribe's registered smtp, simcard, mail api to Person(s) / Pagan(s)
 * volatile notification messages from tribe activities to Pagans / Persons
*
*/
const Notifications = {};
Notifications.get = (alias, tribeId) => {
  const notiffile = `../nationchains/tribes/${tribeId}/notifications/${alias}.json`;
const msg = fs.existsSync(notiffile) ? fs.readJSONSync(notiffile) : {};
return {
status: 200,
ref: "Notification",
msg: "Messagelist",
data: { notif: [{ tribeId, msg }] },
};
};
Notifications.sendsms = async (data, tribeId) => {
/**
   * Never used yet: a mailjet wallet is needed to test
* To set up with mailjet see https://dev.mailjet.com/sms/guides/send-sms-api/#authentication
*
* @param {string} data.To a phone number with international +3360101010101
* @param {string} data.Text text to send
*
* a conf.sms with {url:"smsurl", Token:"", From:""}
*
*
*/
if (!conf.sms) {
return {
status: 412,
ref: "Notifications",
msg: "missingconf",
data: { tribe: tribeId },
};
}
  let missingk = [];
  ["To", "Text"].forEach((k) => {
    if (!data[k]) {
      missingk.push(k);
    }
  });
  if (missingk.length > 0) {
return {
status: 428,
ref: "Notifications",
msg: "missingdata",
data: { missingk: missingk },
};
}
let confsms = conf.sms;
  if (fs.existsSync(`../nationchains/tribes/itm/${tribeId}.json`)) {
    const conftrib = fs.readJSONSync(
      `../nationchains/tribes/itm/${tribeId}.json`
    );
    if (conftrib.sms) confsms = conftrib.sms;
  }
data.From = confsms.From;
  const sendsms = await axios.post(confsms.url, data, {
    headers: {
      Authorization: `Bearer ${confsms.Token}`,
      "Content-Type": "application/json",
    },
  });
if (sendsms.status == 200) {
return {
status: 200,
ref: "Notifications",
msg: "successfullsentsms",
data: {},
};
} else {
return {
status: sendsms.status,
ref: "Notifications",
msg: "errsendsms",
data: { err: sendsms.data },
};
}
  /* if all goes well:
  {
    "From": "MJPilot",
    "To": "+33600000000",
    "Text": "Have a nice SMS flight with Mailjet !",
    "MessageId": "2034075536371630429",
    "SmsCount": 1,
    "CreationTS": 1521626400,
    "SentTS": 1521626402,
    "Cost": {
      "Value": 0.0012,
      "Currency": "EUR"
    },
    "Status": {
      "Code": 2,
      "Name": "sent",
      "Description": "Message sent"
    }
  }
  */
};
Notifications.sendmail = async (data, tribe) => {
/**
* See https://nodemailer.com/message/ for available fields to add
 * @param {string} [data.from] an email authorized by the smtp; defaults to the tribe's emailcontact
* @param {string} data.to list of email separate by ,
* @param {string} data.subject
* @param {string} data.html
* @param {string} data.text
* @param {string} [data.Cc] list of email in copy
* @param {string} [data.Bcc] list of email in hidden copy
* @param {string} [data.attachments] array of
* {filename:'filename.txt',content:'txt'},
* {filename:'img.svg',path:"https://....svg", contentType:'image/svg'}
* {filename:'img.svg',path:"https://....svg", contentType :'text/plain'}
* {filename:'img.png',path:"data:text/svg;base64.aGVsbG8gd29ybGQ="}
*
* @example data
* {"to":"wall-ants.ndda.fr",
* "subject":"Test",
* "html":"<h1>test welcome</h1>",
* "text":"test welcome",
* "attachments":[{filename:"text.txt",pathfile:"/media/phil/textA.txt","contenttype":"text/plain"}]
* }
* @return {object}
* { status: 200, ref:"pagans",msg:"",data: { } }
*
*
*/
if (!conf.smtp || !conf.emailcontact) {
return {
status: 412,
ref: "Notifications",
msg: "missingconf",
data: { tribe: tribe },
};
}
if (!data.from) {
data.from = conf.emailcontact;
}
let missingk = [];
["from", "to", "subject", "html", "text"].forEach((k) => {
if (!data[k]) {
missingk.push(k);
}
});
  if (missingk.length > 0) {
return {
status: 428,
ref: "Notifications",
msg: "missingdata",
data: { missingk: missingk },
};
}
let confsmtp = conf.smtp;
const conftribfile = `../nationchains/tribes/itm/${tribe}.json`;
if (fs.existsSync(conftribfile)) {
const conftrib = fs.readJSONSync(conftribfile);
confsmtp = conftrib.smtp;
if (!data.from) data.from = conftrib.emailcontact;
}
//const client = smtp.connect(confsmtp);
const transporter = await nodemailer.createTransport(confsmtp);
//@todo add attachments management
  if (data.filelist) {
    data.attachments = [];
    let missingfile = [];
    // map filelist entries (filename, pathfile, contenttype) to nodemailer attachments
    data.filelist.forEach((fo) => {
      if (fs.existsSync(fo.pathfile)) {
        data.attachments.push({
          filename: fo.filename,
          path: fo.pathfile,
          contentType: fo.contenttype,
        });
      } else {
        missingfile.push(fo.pathfile);
      }
    });
    if (missingfile.length > 0)
      return {
        status: 428,
        ref: "Notifications",
        msg: "missingfile",
        data: { missingfile: missingfile },
      };
  }
//console.log("data:", data);
const res = await transporter.sendMail(data);
if (
res.accepted &&
data.to.split(",").reduce((acc, m) => acc && res.accepted.includes(m), true)
) {
data.accepted = res.accepted;
data.rejected = res.rejected;
return {
status: 200,
ref: "Notifications",
msg: "successfullsentemail",
data,
};
} else if (res.accepted && res.rejected) {
data.accepted = res.accepted;
data.rejected = res.rejected;
return { status: 410, ref: "Notifications", msg: "errsendmail", data };
} else {
data.errmailer = res.err;
return { status: 417, ref: "Notifications", msg: "errsendmail", data };
}
};
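// Example (illustrative), reusing the data shape documented above:
// Notifications.sendmail(
//   {
//     to: "toto@wall-ants.ndda.fr",
//     subject: "Test",
//     html: "<h1>test welcome</h1>",
//     text: "test welcome",
//   },
//   "ndda"
// ).then((res) => console.log(res.msg)); // "successfullsentemail" when every recipient is accepted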
module.exports = Notifications;

models/Odmdb.js Normal file (863 lines added)

@@ -0,0 +1,863 @@
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const dayjs = require("dayjs");
const axios = require("axios");
const conf = require(`../../conf/townconf.json`);
const Checkjson = require(`./Checkjson.js`);
const { promiseHooks } = require("v8");
/**
 * This manages Objects: indexing, searching, checking and CRUD actions
* @objectPathName = objectpath/objectname
* objectpath/objectname/conf.json
* /idx/all_key1.json = {key1value:{object}}
* lst_key1.json = [key1valA,key1val2,...]
* key2_key1.json = {key2value:[key1value]}
* all_key1_filtername = {key1value:{object}}
* /itm/apxidvalue.json
* in conf.json:
* {
* "name": "object name ex:'nations'",
* "schema": "relativ schema from dirapi dirtown ex:'adminapi/schema/nations.json'"",
* "lastupdateschema": 0, time stamp last schema update
* "lastupdatedata":0 time stamp last itm update
* }
*
 * Specific keys in a schema for apxtri:
 * apxid : the field value used to store an item
 * apxuniquekey : list of fields that have to be unique; you cannot have 2 itm with the same key value
* apxidx : list of index file /idx/
* { "name":"lst_fieldA", "keyval": "alias" }, => lst_fieldA.json = [fieldAvalue1,...]
{ "name":"all_fieldA", "keyval": "fieldA" }, => all_fieldA.json =
if fieldA in apxuniquekey = {fieldAvalue1:{object}}
not in apxuniquekey = {fieldAvalue1:[{object}]}
{ "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB"]}, => word_fieldA.json
if fieldA in apxuniquekey = {fieldAvalue1:fieldBvalue,}
if fieldA not in uniquekey = {fieldAvalue1: [fieldBv1,fieldBv2,]}
{ "name":"word_fieldA", "keyval": "fieldA", "objkey": ["fieldB","fieldC"]}, => word_fieldA.json
if fieldA in apxuniquekey = {fieldAvalue1:{fieldB:val,fieldC:val},}
if fieldA not in uniquekey = {fieldAvalue1: [{fieldB:val,fieldC:val},]}
 * apxaccessrights : list of profils with CRUD accessrights
**/
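/* Example (illustrative) for a 'pagans' object:
 * conf.json:
 * { "name": "pagans", "schema": "adminapi/schema/pagans.json",
 *   "lastupdateschema": 0, "lastupdatedata": 0 }
 * schema extract:
 * { "apxid": "alias",
 *   "apxuniquekey": ["publickey"],
 *   "apxidx": [
 *     { "name": "lst_alias", "keyval": "alias" },
 *     { "name": "publickey_alias", "keyval": "publickey", "objkey": ["alias"] }
 *   ],
 *   "apxaccessrights": { "owner": { "R": [], "U": ["email"], "D": [] } } }
 */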
const Odmdb = {};
/**
* @api syncObject
* @param {string} url to an existing object conf (/objectname/conf.json)
* @param {timestamp} timestamp
* 0 => rebuild local object from all_{idapx}.json
* >0 => update itm and idx search by datetime
* @param
*/
Odmdb.syncObject = () => {};
/**
* @api createObject: create a space to host object
*
* @source {string} "new", url,
* @schemapath {string} path to create or replace a schema ${schemaPath}/schema/
* @objectPath {string} path where object are store
* @objectName {string} name of the object
* @schema {object} the json schema for this object
* @lgjson {object} the json file for a specific language
* @lg {string} the 2 letters language
*
* Setup a new object localy =>
* source
*
* - from scratch =>
* Create
* - from a synchronization
* Download from source all_{apxid}.json
*
*
*/
Odmdb.createObject = (
source,
schemaPath,
objectPath,
objectName,
schema,
lgjson,
lg
) => {
if (!fs.existsSync(schemaPath)) {
return {
status: 404,
ref: "Odmdb",
info: "pathnamedoesnotexist",
moreinfo: { fullpath: schemaPath },
};
}
if (!fs.existsSync(objectPath)) {
return {
status: 404,
ref: "Odmdb",
info: "pathnamedoesnotexist",
moreinfo: { fullpath: objectPath },
};
}
// store schema file if not empty undefined or {}
if (
schema &&
!(Object.keys(schema).length === 0 && schema.constructor === Object)
) {
fs.outputJSONSync(`${schemaPath}/schema/${objectName}.json`, schema, {
spaces: 2,
});
}
if (
lgjson &&
lg &&
!(Object.keys(lgjson).length === 0 && lgjson.constructor === Object)
) {
fs.outputJSONSync(`${schemaPath}/lg/${objectName}_${lg}.json`, lgjson, {
spaces: 2,
});
}
//create environnement object with the new schema config
if (!fs.existsSync(`${objectPath}/${objectName}`)) {
fs.outputJsonSync(
`${objectPath}/${objectName}/idx/confjson`,
{ schema: `${schemaPath}/schema/${objectName}.json` },
{ spaces: 2 }
);
}
return { status: 200 };
};
/**
* Update an object
* @param {string} objectPathname
* @param {object} meta update request
* lg:
* lgobj: object to store in /lg/objectname_lg.json
* schema: an update schema
* @return {status, ref:"Odmdb", msg:"", data}
*
* Create a tmp object env and check existing object to identify any issues
* If all is fine then apply change by replacing
*/
Odmdb.updateObject = (objectPathname, meta) => {};
/**
* Get a schema from objectPathname
*
* todo only local schema => plan a sync each 10minutes
* @schemaPath local path adminapi/schema/objectName.json or /tribename/schema/objectName
* @validschema boolean if necessary to check schema or not mainly use when change schema
* @return {status:200,data:{conf:"schemaconf",schema:"schemacontent"} }
*/
Odmdb.Schema = (objectPathname, validschema) => {
const getpath = (schemaPath) => {
if (schemaPath.slice(-5) != ".json") schemaPath += ".json";
if (schemaPath.substring(0, 4) == "http") {
      // @todo send an http request to get the schema with an await axios
} else {
schemaPath = `../nationchains/tribes/${schemaPath}`;
/*if (schemaPath.substring(0, 9) == "adminapi/") {
schemaPath = `${conf.dirapi}/${schemaPath}`;
} else {
schemaPath = `${conf.dirtown}/tribes/${schemaPath}`;
}*/
if (!fs.existsSync(schemaPath)) {
return {};
} else {
return fs.readJsonSync(schemaPath);
}
}
};
console.log(`${objectPathname}/conf.json`);
const confschema = fs.readJsonSync(`${objectPathname}/conf.json`);
console.log(confschema);
const res = {
status: 200,
ref: "Odmdb",
msg: "getschema",
data: { conf: confschema },
};
res.data.schema = getpath(confschema.schema);
if (Object.keys(res.data.schema).length == 0) {
return {
status: 404,
ref: "Odmdb",
msg: "schemanotfound",
data: { objectPathname, schema: {} },
};
}
//looking for type:object with $ref to load and replace by ref content (ref must be adminapi/ or tribeid/)
//@todo only 1 level $ref if multi level need to rewrite with recursive call
Object.keys(res.data.schema.properties).forEach((p) => {
if (
res.data.schema.properties[p].type == "object" &&
res.data.schema.properties[p]["$ref"]
) {
const subschema = getpath(res.data.schema.properties[p]["$ref"]);
      if (Object.keys(subschema).length == 0) {
res.status = 404;
res.msg = "missingref";
res.data.missingref = res.data.schema.properties[p]["$ref"];
return res;
} else {
subschema.description += ` from external schema: ${res.data.schema.properties[p]["$ref"]}`;
res.data.schema.properties[p] = subschema;
}
}
});
if (!res.data.schema.apxid) {
return {
status: 406,
ref: "Odmdb",
msg: "missingprimarykey",
data: {},
};
}
if (res.data.schema.apxidx) {
//add apxidx to apxuniquekey in case not
if (!res.data.schema.apxuniquekey.includes(res.data.schema.apxid)) {
res.data.schema.apxuniquekey.push(res.data.schema.apxid);
}
res.data.schema.apxidx.forEach((idx) => {
if (
idx.objkey &&
!res.data.schema.apxuniquekey.includes(idx.keyval) &&
!idx.objkey.includes(res.data.schema.apxid)
) {
return {
status: 406,
ref: "Odmdb",
msg: "unconsistencyapxidx",
data: {
name: idx.name,
keyval: idx.keyval,
objkey: idx.objkey,
apxid: res.data.schema.apxid,
},
};
}
});
}
if (validschema || 1 == 1) {
// return {status:200, ref, msg} or {status!:200,multimsg:[{ref,msg;data}]}
const check = Checkjson.schema.validation(res.data.schema);
if (check.status != 200) {
res.multimsg = check.multimsg;
res.status = check.status;
}
}
return res;
};
Odmdb.search = (objectPath, objectName, search) => {
/*
@search= {
txt: string,
algo: match | pattern | fuzzy
fieldstring:[list of field],
indexfilter:{index1:[val1,val2 | ] }
}
Return data:[uuids]
example: search exact match hill in townId
heavy search={txt:"hill",algo:"match",fieldstring:"toxnId"}
light search={txt:"hill", algo:"match", indexfilter:{"key":"townId","value":[]}}
light search={txt:"hill", algo:"match", indexfilter:{"key":"nationId","value":"ants"}}
*/
  const schema = Odmdb.Schema(`${objectPath}/${objectName}`, false);
if (schema.status != 200) return schema;
};
Odmdb.r = (objectPathname, apxid, role) => {
const itmf = `${objectPathname}/itm/${apxid}.json`;
if (!fs.existsSync(itmf)) {
return {
status: 404,
ref: "Odmdb",
msg: "persondoesnotexist",
data: { person: apxid },
};
}
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
const itm = fs.readJsonSync(itmf);
if (itm.owner && itm.owner == role.xalias) {
role.xprofils.push("owner");
}
const accessright = Odmdb.accessright(
getschema.data.schema.apxaccessrights,
role
);
if (!accessright.R) {
return {
status: 403,
ref: "Odmdb",
msg: "forbidden",
data: { person: apxid },
};
}
const data = {};
accessright.R.forEach((p) => {
data[p] = itm[p];
});
return { status: 200, ref: "Odmdb", msg: "found", data };
};
/**
* To get an array of item (itm) per primarykey with a list of field
* Object are store in objectPath/objectName/conf.json contain
*
* @objectPathname where object are store (where /object/conf.json indicate where the schema is)
* @uuidprimarykeyList list of uuid requested
* @role {xalias,xprofiles} allow to get accessright come from header
* @propertiesfilter (optionnal) key to return for each object (if undefined then return all)
* @Return {status:200; data:{uuid:{data filter by @propertiesfilter},uuid:"notfound"}}
 ASUP (deprecated): must be handled at the level of the index views
*/
Odmdb.ASUPreads = (objectPathname, apxidlist, role, propertiesfilter) => {
const res = { status: 200, data: {} };
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
// Test if readable at least if owner
role.xprofils.push("owner");
  const accessright = Odmdb.accessright(
    getschema.data.schema.apxaccessrights,
    role
  );
if (!accessright.R) {
return {
status: 403,
ref: "Odmdb",
msg: "accessforbidden",
data: { crud: "R", accessright },
};
}
apxidlist.forEach((id) => {
if (fs.existsSync(`${objectPathname}/itm/${id}.json`)) {
const objectdata = fs.readJsonSync(`${objectPathname}/itm/${id}.json`);
if (objectdata.owner && objectdata.owner == role.xalias) {
if (!role.xprofils.includes("owner")) role.xprofils.push("owner");
} else {
if (role.xprofils.includes("owner"))
role.xprofils = role.xprofils.filter((e) => e !== "owner");
}
const accessright = Odmdb.accessright(
        getschema.data.schema.apxaccessrights,
role
);
if (!accessright.R) {
res.data[id] = "forbiden";
} else {
let newpropertiesfilter = Object.keys(objectdata);
if (accessright.R.length > 0) {
const setaccess = new Set(accessright.R);
if (!propertiesfilter) propertiesfilter = Object.keys(objectdata);
newpropertiesfilter = propertiesfilter.filter((f) =>
setaccess.has(f)
);
}
const objinfo = {};
newpropertiesfilter.forEach((k) => {
if (objectdata[k]) objinfo[k] = objectdata[k];
});
res.data[id] = objinfo;
}
} else {
res.data[id] = "notfound";
}
});
return res;
};
/**
* Convert profils in accessright
* @param {*} apxaccessright from schema object {profilname:{R}}
* @param {*} role {xprofils,xalias} accessible after isAuthenticated
 * @returns access rights: C create if present, R read (properties list, or all if empty), U update (properties list, or all if empty), D delete
 * example: {"C":[],"R":[properties list],"U":[properties list],"D":[]}
*/
Odmdb.accessright = (apxaccessrights, role) => {
//console.log("apxaccessrights",apxaccessrights)
//console.log("role",role)
const accessright = {};
role.xprofils.forEach((p) => {
if (apxaccessrights[p]) {
Object.keys(apxaccessrights[p]).forEach((act) => {
if (!accessright[act]) {
accessright[act] = apxaccessrights[p][act];
} else {
accessright[act] = [
...new Set([...accessright[act], ...apxaccessrights[p][act]]),
];
}
//console.log(act,accessright[act])
});
}
});
return accessright;
};
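// Example (illustrative):
// Odmdb.accessright(
//   { pagans: { R: ["alias", "publickey"] }, owner: { R: [], U: ["email"], D: [] } },
//   { xalias: "toto", xprofils: ["pagans", "owner"] }
// );
// // => { R: ["alias", "publickey"], U: ["email"], D: [] }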
/**
* CUD a data itm into objectPathname if checkJson is valid
* and update idx
* idx is upto date for unique properties but not for list
* @param {string} objectpathname folder name where object are stored
* @param {object} itm an object respecting the checkJson schema in objectPathname/conf.json
* @param {string} crud: C reate U pdate D elete
 * @param {array} role {xprofils,xalias} xprofils list of profils like anonymous, pagans, person; owner is deduced when the object's owner property equals the alias
* */
Odmdb.cud = (objectPathname, crud, itm, role, runindex = true) => {
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
if (!itm[getschema.data.schema.apxid]) {
return {
status: 406,
ref: "Odmdb",
msg: "apxidmissing",
data: { missingkey: getschema.data.schema.apxid },
};
}
fs.ensureDirSync(`${objectPathname}/itm/`);
fs.ensureDirSync(`${objectPathname}/idx/`);
const existid = fs.existsSync(
`${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
);
/*const pathindex = `${objectPathname}/idx/lst_${getschema.data.schema.apxid}.json`;
if (!fs.existsSync(pathindex)) {
fs.outputJSONSync(pathindex, []);
fs.ensureDirSync(`${objectPathname}/itm/`);
}
const existid = fs
.readJSONSync(pathindex)
.includes(itm[getschema.data.schema.apxid]);
*/
if (existid && crud == "C") {
return {
status: 406,
ref: "Odmdb",
msg: "alreadyexist",
data: {
objectname: path.basename(objectPathname),
key: getschema.data.schema.apxid,
val: itm[getschema.data.schema.apxid],
},
};
}
if (!existid && ["U", "D"].includes(crud)) {
return {
status: 406,
ref: "Odmdb",
msg: "doesnotexist",
data: {
objectname: path.basename(objectPathname),
key: getschema.data.schema.apxid,
val: itm[getschema.data.schema.apxid],
},
};
}
const itmold = existid
? fs.readJSONSync(
`${objectPathname}/itm/${itm[getschema.data.schema.apxid]}.json`
)
: {};
if (existid && itmold.owner && itmold.owner == role.xalias) {
role.xprofils.push("owner");
}
if (!existid && crud == "C" && !itm.owner) {
// set owner cause this is a Create
itm.owner = role.xalias;
role.xprofils.push("owner");
}
//get accessright {C:[],R:[],U:[],D:[]} if exist means authorize, if array contain properties (for R and U) right is only allowed on properties
const accessright = Odmdb.accessright(
getschema.data.schema.apxaccessrights,
role
);
console.log("accessright", accessright);
if (
(crud == "C" && !accessright.C) ||
(crud == "D" && !accessright.D) ||
(crud == "U" && !accessright.U)
) {
return {
status: 403,
ref: "Odmdb",
msg: "accessforbidden",
data: { crud, accessright },
};
}
//delete or save
if (crud == "D") {
itmold["dt_delete"] = dayjs().toISOString();
fs.outputJSONSync(
`${objectPathname}/delitm/${itmold[getschema.data.schema.apxid]}.json`,
itmold
);
fs.rmSync(
`${objectPathname}/itm/${itmold[getschema.data.schema.apxid]}.json`
);
} else {
// if Create Update erase old version
let itmtostore = itm;
if (crud == "U" && accessright.U.length > 0) {
itmtostore = itmold;
accessright.U.forEach((p) => {
itmtostore[p] = itm[p];
});
itmtostore.dt_update = dayjs().toISOString();
}
if (crud == "C") itmtostore.dt_create = dayjs().toISOString();
// check consistency of datatostore
const chkdata = Checkjson.schema.data(
getschema.data.schema,
itmtostore,
false
);
if (chkdata.status != 200) return chkdata;
if (!getschema.data.schema.apxuniquekey)
getschema.data.schema.apxuniquekey = [];
console.log(`${objectPathname}/itm/${chkdata.data.apxid}.json`);
console.log(chkdata.data.itm);
fs.outputJSONSync(
`${objectPathname}/itm/${chkdata.data.apxid}.json`,
chkdata.data.itm
);
}
//console.log("getschema", getschema);
//rebuild index if requested
console.log("runidx", runindex);
console.log(objectPathname);
if (runindex) Odmdb.runidx(objectPathname, getschema.data.schema);
getschema.data.conf.lastupdatedata = dayjs().toISOString();
fs.outputJSONSync(`${objectPathname}/conf.json`, getschema.data.conf);
return {
status: 200,
ref: "Odmdb",
msg: "cudsuccessfull",
data: {},
};
};
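// Example (illustrative) create, assuming a persons object whose schema apxid is 'alias'
// and whose apxaccessrights grant C to the pagans or owner profil:
// Odmdb.cud(
//   "../nationchains/tribes/ndda/persons",
//   "C",
//   { alias: "12", name: "fred" },
//   { xalias: "12", xprofils: ["pagans"] }
// ); // => { status: 200, ref: "Odmdb", msg: "cudsuccessfull", data: {} } and idx files are rebuilt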
/**
 * apxidx: list of index files
 * { name: string, // filename in /idx/name.json
 *   type: "array" (list of unique values found in keyval, eventually filtered), example: [val1, val2, val3,...]
 *         "view" (only for a unique key; object with objkey properties, eventually filtered), example: {idalias:{itm}}
 *         "distribution" (=> {val of keyval: [apxid itm values]})
 *   keyval: string, // a property of type string or array
 *   objkeys: [] an array of properties; if empty [] means all properties, if 1 element => an array of this property
 *   filter: an eval function based on itm data (itm.key) that returns true (keep the item) or false (ignore the item)
 * }
 */
Odmdb.runidx = (objectPathname, schema) => {
console.log(`idx for ${objectPathname}`);
if (!schema || !schema.apxid) {
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
schema = getschema.data.schema;
}
const ventil = {};
schema.apxidx.forEach((id) => {
ventil[id.name] = id;
if (id.keyval.includes(".")) {
// Means keyval is an object that can be only string for an idx
ventil[id.name].isobject = true;
ventil[id.name].isunique = false;
ventil[id.name].nbobjkey = 0;
ventil[id.name].keyvaltype = "string";
ventil[id.name].filter = id.filter ? id.filter.replace(/;/g, "") : ""; // check integrity of string
ventil[id.name].data = {};
} else {
ventil[id.name].isunique = schema.apxuniquekey.includes(id.keyval);
ventil[id.name].nbobjkey = id.objkey ? id.objkey.length : 0;
ventil[id.name].keyvaltype = schema.properties[id.keyval].type;
ventil[id.name].filter = id.filter ? id.filter.replace(/;/g, "") : ""; // check integrity of string
ventil[id.name].data = ventil[id.name].type == "array" ? [] : {};
}
});
glob.sync(`${objectPathname}/itm/*.json`).forEach((i) => {
const itm = fs.readJSONSync(i);
Object.keys(ventil).forEach((n) => {
let keep = true;
if (ventil[n].filter != "") {
try {
keep = eval(ventil[n].filter);
} catch (err) {
keep = false;
}
}
if (keep && ventil[n].type == "array" && itm[ventil[n].keyval]) {
if (ventil[n].keyvaltype == "array") {
itm[ventil[n].keyval].forEach((v) => {
if (!ventil[n].data.includes(v)) ventil[n].data.push(v);
});
} else {
if (!ventil[n].data.includes(itm[ventil[n].keyval]))
ventil[n].data.push(itm[ventil[n].keyval]);
}
}
if (
keep &&
ventil[n].type == "view" &&
ventil[n].isunique &&
itm[ventil[n].keyval]
) {
if (ventil[n].nbobjkey == 0)
ventil[n].data[itm[ventil[n].keyval]] = itm;
if (ventil[n].nbobjkey == 1)
ventil[n].data[itm[ventil[n].keyval]] = itm[ventil[n].objkey[0]];
if (ventil[n].nbobjkey > 1) {
const objdata = {};
Object.keys(itm).forEach((k) => {
if (ventil[n].objkey.includes(k)) objdata[k] = itm[k];
});
ventil[n].data[itm[ventil[n].keyval]] = objdata;
}
}
if (keep && ventil[n].type == "distribution" && itm[ventil[n].keyval]) {
const listval =
ventil[n].keyvaltype == "string"
? [itm[ventil[n].keyval]]
: itm[ventil[n].keyval];
// itm[ventil[n].keyval] is an array
listval.forEach((val) => {
if (!ventil[n].data[val]) ventil[n].data[val] = [];
ventil[n].data[val].push(itm[schema.apxid]);
});
}
if (keep && ventil[n].type == "distribution" && ventil[n].isobject && itm[ventil[n].keyval.split('.')[0]]) {
let itmval = JSON.parse(JSON.stringify(itm));
console.log( ventil[n].keyval)
console.log(itmval)
        ventil[n].keyval
          .split(".")
          .forEach((i) => (itmval = itmval && itmval[i] ? itmval[i] : null));
if (itmval) {
if (!ventil[n].data[itmval]) ventil[n].data[itmval] = [];
ventil[n].data[itmval].push(itm[schema.apxid]);
}
}
});
});
Object.keys(ventil).forEach((n) => {
//console.log(`${objectPathname}/idx/${ventil[n].name}.json`)
fs.outputJSON(
`${objectPathname}/idx/${ventil[n].name}.json`,
ventil[n].data
);
});
};
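// Example (illustrative) idx outputs of runidx, given items
// { alias: "t1", nationId: "ants" } and { alias: "t2", nationId: "ants" }
// and an apxidx declaring the three index types:
// idx/lst_alias.json (type "array") => ["t1", "t2"]
// idx/all_alias.json (type "view" on the unique apxid) => { "t1": { itm }, "t2": { itm } }
// idx/nationId_alias.json (type "distribution") => { "ants": ["t1", "t2"] }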
/**
* create/update idx from itm(s)
*
* @param {string} objectPathname
* @param {object} itm item to Create or to Update or {} if crud == I or crud == D
* @param {object} itmold (current item) if crud == U or D to get previous itm before change or {} if crud==I or C
* @param {letter} crud CUDI C add, U update, D delete I reindex
* @param {array} idx if specific request to rebuild list of idx only if [] then use schema one
* @param {object} schema if empty it use schema from Odmdb.Schema().data.schema
*
* example create alias 12 name fred:
* Odmdb.idxfromitm('.../tribes/ndda/persons',"C",{alias:'12',name:"fred"},{},[], {person schema})
* example update alias 12 in name freddy:
* Odmdb.idxfromitm('.../tribes/ndda/persons',"U",{alias:'12',name:"freddy"},{alias:'12',name:"fred"},[], {person schema})
* example delete alias 12:
* Odmdb.idxfromitm('.../tribes/ndda/persons',"D",{},{alias:'12',name:"fred"},[], {person schema})
* example to rebuild all index from scratch
* Odmdb.idxfromitm('.../tribes/ndda/persons',"I",{},{},[], {person schema})
* example to rebuild only publickey_alias index from scratch
* Odmdb.idxfromitm('.../tribes/ndda/pagans',"I",{},{},[{ name:"publickey_alias",keyval:"publickey",objkey:["alias"]}], {pagans schema})
*
*/
Odmdb.ASUPidxfromitm = (
objectPathname,
crud,
itm,
itmold,
idxs = [],
schema
) => {
console.log(`idxfromitem for ${objectPathname} action:${crud}`);
if (!schema || !schema.apxid) {
const getschema = Odmdb.Schema(objectPathname, true);
if (getschema.status != 200) return getschema;
schema = getschema.data.schema;
}
console.log(schema.apxuniquekey);
const itms = crud == "I" ? glob.sync(`${objectPathname}/itm/*.json`) : [itm];
console.log(itms);
if (crud == "I") {
//reinit all idx
idxs.forEach((idx) => {
fs.remove(`${objectPathname}/idx/${idx.name}.json`);
});
}
let idxtoreindex = []; //store index that has to be reprocessto get full context
idxs = idxs.length == 0 ? schema.apxidx : idxs; // get all index if none
itms.forEach((i) => {
if (crud == "I") {
itm = fs.readJSONSync(i);
}
//console.log(itm);
idxs.forEach((idx) => {
const keyvalisunique = schema.apxuniquekey.includes(idx.keyval); // check if keyval is unique mean store as an object (or string) else store as an array
const idxsrc = `${objectPathname}/idx/${idx.name}.json`;
const idxinit = idx.name.substring(0, 4) == "lst_" ? [] : {}; // select type of idx (array or object)
let idxfile = !fs.existsSync(idxsrc) ? idxinit : fs.readJSONSync(idxsrc);
if (idx.name.substring(0, 4) == "lst_") {
if (["D", "U"].includes(crud)) {
if (keyvalisunique) {
idxfile = idxfile.filter((e) => e !== itmold[idx.keyval]);
} else {
idxtoreindex.push(idx); //@todo
}
}
console.log(idx.keyval);
console.log(itm[idx.keyval]);
if (
["C", "U", "I"].includes(crud) &&
!idxfile.includes(itm[idx.keyval])
) {
idxfile.push(itm[idx.keyval]);
}
} else {
if (!idx.objkey) {
//mean all properties
idx.objkey = Object.keys(schema.properties);
}
if (keyvalisunique && idx.objkey.length == 1) {
if (["D", "U"].includes(crud)) {
delete idxfile[itmold[idx.keyval]];
} else {
idxfile[itm[idx.keyval]] = itm[idx.objkey[0]];
}
}
if (keyvalisunique && idx.objkey.length > 1) {
if (["D", "U"].includes(crud)) {
delete idxfile[itmold[idx.keyval]];
} else {
const itmfilter = {};
idx.objkey.forEach((i) => {
if (itm[i]) itmfilter[i] = itm[i];
});
idxfile[itm[idx.keyval]] = itmfilter;
}
}
if (!keyvalisunique && idx.objkey.length == 1) {
if (
["D", "U"].includes(crud) &&
            idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]) > -1
) {
// U because need to remove previous value before adding it
idxfile[itmold[idx.keyval]].splice(
              idxfile[itmold[idx.keyval]].indexOf(itmold[idx.objkey[0]]),
1
);
}
if (["C", "U", "I"].includes(crud)) {
if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
if (!idxfile[itm[idx.keyval]].includes(itm[idx.objkey[0]])) {
idxfile[itm[idx.keyval]].push(itm[idx.objkey[0]]);
}
}
}
if (!keyvalisunique && idx.objkey.length > 1) {
if (["D", "U"].includes(crud) && idxfile[itmold[idx.keyval]]) {
// U because need to remove previous value before adding it
let arrayofit = [];
idxfile[itmold[idx.keyval]].forEach((it) => {
if (it[schema.apxid] != itm[schema.apxid]) arrayofit.push(it);
});
idxfile[itmold[idx.keyval]] = arrayofit;
}
if (["C", "U", "I"].includes(crud)) {
const itmfilter = {};
idx.objkey.forEach((i) => {
if (itm[i]) itmfilter[i] = itm[i];
});
if (!idxfile[itm[idx.keyval]]) idxfile[itm[idx.keyval]] = [];
idxfile[itm[idx.keyval]].push(itmfilter);
}
}
}
fs.outputJSONSync(idxsrc, idxfile);
});
});
if (crud != "I") {
//update lastupdatedata to inform something change
const confschema = fs.readJSONSync(`${objectPathname}/conf.json`);
confschema.lastupdatedata = dayjs().toISOString();
fs.outputJSONSync(`${objectPathname}/conf.json`, confschema);
}
return { status: 200, ref: "Odmdb", msg: "successreindex", data: {} };
};
Odmdb.updatefromidxall = (objectname, idxname, data, lastupdate) => {
  /**
   * Update all itm of objectname from index idx/idxname with data
   * if an itm exists locally but not in data then /objectname/conf.json.lastupdate = now
   * otherwise /objectname/conf.json.lastupdate = lastupdate
   * this way, next time server A wants to refresh from B, its lastupdate is lower than the one on A
   */
let conflastupdate = 0;
let localidx = {};
if (fs.existsSync(`../nationchains/${objectname}/idx/${idxname}`)) {
localidx = fs.readJsonSync(`../nationchains/${objectname}/idx/${idxname}`);
}
Object.keys(data).forEach((id) => {
if (localidx[id]) {
if (
localidx[id].dt_update &&
data[id].dt_update &&
localidx[id].dt_update > data[id].dt_update
) {
// means local information is fresher than the one in data for replacement
// .toISIString ex: 2019-01-25T02:00:00.000Z'
conflastupdate = dayjs().toISOString();
} else {
// replace itm with data
localidx[id] = data[id];
fs.outputJsonSync(
`../nationchains/${objectname}/itm/${id}.json`,
data[id]
);
}
} else {
// add itm
localidx[id] = data[id];
fs.outputJsonSync(
`../nationchains/${objectname}/itm/${id}.json`,
data[id]
);
}
});
//check if it miss id in fresher update means conf.lastupdate will be now to indicate
Object.keys(localidx).forEach((id) => {
if (!data[id]) {
conflastupdate = dayjs().toISOString();
}
});
// update the object files
if (conflastupdate == 0) conflastupdate = lastupdate;
fs.outputJSONSync(`../nationchains/${objectname}/idx/${idxname}`, localidx);
const objconf = fs.readJsonSync(`../nationchains/${objectname}/conf.json`);
objconf.lastupdate = conflastupdate;
fs.outputJsonSync(`../nationchains/${objectname}/conf.json`, objconf);
return {
status: 200,
ref: "Odmdb.js",
info: "Successfullupdate",
data: { objectname, idxname, lastupdate },
};
};
module.exports = Odmdb;

models/Pagans.js Normal file (187 lines added)

@@ -0,0 +1,187 @@
const glob = require("glob");
const path = require("path");
const dayjs = require("dayjs");
const fs = require("fs-extra");
const axios = require("axios");
const Mustache = require('mustache');
const openpgp = require("openpgp");
const Notifications = require("./Notifications.js");
const Odmdb = require("./Odmdb.js");
const conf = require(`../../conf/townconf.json`);
/**
 * Pagans manages numeric Identity and Person (Person = Pagan Id + tribe)
*
*
*
*/
const Pagans = {};
/**
* Remove authentification token after a logout
* @param {string} alias
* @param {string} tribe
* @param {integer} xdays
* @param {string} xhash
* @returns {status:200, ref:"Pagans",msg:"logout"}
* tmpfs name file has to be on line with the tmpfs create by isAuthenticated
* tmpfs contain profils name for a tribe/
*/
Pagans.logout = (alias, tribe, xdays, xhash) => {
//console.log(alias, tribe, xdays, xhash);
// inline with middleware isAuthenticated.js
let tmpfs = `../tmp/tokens/${alias}_${tribe}_${xdays}`;
//max filename in ext4: 255 characters
tmpfs += `_${xhash.substring(150, 150 + tmpfs.length - 249)}.json`;
fs.remove(tmpfs);
console.log('logout token',tmpfs)
return { status: 200, ref: "Pagans", msg: "logout" };
};
/**
 * @param {string} alias an alias that may or may not exist
* @return {object} { status: 200, ref:"pagans",msg:"aliasexist",data: { alias, publicKey } }
* { status: 404, ref:"pagans",msg:"aliasdoesnotexist",data: { alias} }
*
**/
Pagans.getalias = (alias) => {
  //bypass Odmdb because everything here is public; saves resources
if (fs.existsSync(`../nationchains/pagans/itm/${alias}.json`)) {
return {
status: 200,
ref: "Pagans",
msg: "aliasexist",
data: fs.readJSONSync(
`../nationchains/pagans/itm/${alias}.json`
),
};
} else {
return {
status: 404,
ref: "Pagans",
msg: "aliasdoesnotexist",
data: { alias },
};
}
};
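// Example (illustrative):
// Pagans.getalias("toto");
// // => { status: 200, ref: "Pagans", msg: "aliasexist", data: { alias: "toto", publickey: "-----BEGIN PGP PUBLIC KEY BLOCK-----..." } }
// Pagans.getalias("ghost");
// // => { status: 404, ref: "Pagans", msg: "aliasdoesnotexist", data: { alias: "ghost" } }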
/**
* Send email with alias's keys to email or person alias person.recovery.email
*
* If email or pubkey is undefined then get data from tribe/person(alias)
* Send email with keys
* @param {object} data
* @param {string} data.alias
* @param {pgpPrivate} [data.privatekey]
* @param {string} [data.passphrase]
* @param {string} data.tribe
* @param {pgpPublic} [data.publickey]
* @param {string} [data.email]
* @param {string} data.lg
*/
Pagans.sendmailkey = (data) => {
console.log(
data.alias,
"-",
data.privatekey.substring(0,10),
"-",
data.tribe,
"-",
data.passphrase,
"-",
data.publickey.substring(0,10),
"-",
data.email
);
const person = { alias:data.alias, privatekey:data.privatekey, tribe:data.tribe };
if (!data.publickey || !data.email || !data.privatekey) {
const personfile = `../../nationchains/tribes/${data.tribe}/objects/persons/itm/${data.alias}.json`;
if (!fs.existsSync(personfile)) {
return {
status: 404,
ref: "Pagans",
msg: "persondoesnotexist",
data: { alias:data.alias, tribe:data.tribe },
};
}
const persondata= fs.readJsonSync(personfile);
person.email = persondata.recoveryauth.email;
person.publickey = persondata.recoveryauth.publickey;
person.privatekey = persondata.recoveryauth.privatekey;
person.passphrase = persondata.recoveryauth.passphrase;
} else {
person.email = data.email;
person.passphrase = data.passphrase;
person.publickey = data.publickey;
}
person.avecpassphrase = person.passphrase != "";
console.log("person:", person);
const tplemail = require(`../../nationchains/tribes/adminapi/template/createidentity_${data.lg}.js`);
const maildata = {
to: person.email,
subject: Mustache.render(tplemail.subject, person),
html: Mustache.render(tplemail.html, person),
text: Mustache.render(tplemail.text, person),
attachments: [
{
filename:`${person.alias}_publickey.txt`,
content: person.publickey,
contentType:"text/plain"
},
{
filename:`${person.alias}_privatekey.txt`,
content: person.privatekey,
contentType:"text/plain"
}
]
};
return Notifications.sendmail(maildata, data.tribe);
};
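// Usage sketch (recovery flow): with no keys passed in data, the keys stored in
// tribes/<tribe>/objects/persons/itm/<alias>.json (recoveryauth) are emailed to the recovery address:
// Pagans.sendmailkey({ alias: "alice", tribe: "smatchit", lg: "fr" });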
Pagans.authenticatedetachedSignature = async (
alias,
pubK,
detachedSignature,
message
) => {
/**
 * Check that a message was signed with the privateKey matching a publicKey
 * This is not necessary if isAuthenticated, but can be useful to double check
 * @TODO finish it and implement it also in /apxpagan.js for browser
 * @alias {string} alias linked to the publicKey
 * @pubK {string} publicKey in armored text format
 * @detachedSignature {string} a detached signature produced by apx.detachedSignature
 * @message {string} the message that was signed
 * @return {boolean} true if the message was signed by alias
 *                   false if not
 */
const publicKey = await openpgp.readKey({ armoredKey: pubK });
const msg = await openpgp.createMessage({ text: message });
const signature = await openpgp.readSignature({
armoredSignature: detachedSignature, // parse detached signature
});
const verificationResult = await openpgp.verify({
message: msg, // Message object
signature,
verificationKeys: publicKey,
});
const { verified, keyID } = verificationResult.signatures[0];
try {
await verified; // throws on invalid signature
console.log("Signed by key id " + keyID.toHex());
// pubK is the key registered for this alias, so a valid signature means alias signed the message
return true;
} catch (e) {
console.log("Signature could not be verified: " + e.message);
return false;
}
};
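// Usage sketch (the armored key and detached signature are assumed to come from
// apx.detachedSignature on the browser side, as the @TODO above suggests):
// const ok = await Pagans.authenticatedetachedSignature(
//   "alice",
//   armoredPublicKey,
//   armoredDetachedSignature,
//   "challenge text"
// );
// if (!ok) return { status: 401, ref: "Pagans", msg: "signaturefailled" };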
module.exports = Pagans;

646
models/Toolsbox.js Executable file
View File

@@ -0,0 +1,646 @@
/* eslint-disable no-useless-escape */
const fs = require("fs");
const path = require("path");
const bcrypt = require("bcrypt");
const moment = require("moment");
const Checkjson = require("./Checkjson.js"); // needed by utils.normalize.telephonefr below
const utils = {};
console.log(
"Check in /utils/index.js to find useful functions for your dev.\n Feel free to send suggestions or code to the maintainer of the apxtri project (see /package.json to get the email).\n We'll add it to the roadmap."
);
/**
* EMAIL
*/
/* const validateEmail = email => {
const regExp = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return regExp.test(email);
};
const validatePassword = pwd => {
const regExp = new RegExp(
/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[$@$!%*?&.])[A-Za-z\d$@$!%*?&.{}:|\s]{8,}/
);
return regExp.test(pwd);
};
const filterInvalidInArray = (array, validate) =>
array ? array.filter(el => !validate(el)) : undefined; // return undefined when every elements is valid
/**
* POSTAL CODE
*/
/*
const validatePostalCode = postalCode =>
/(^\d{5}$)|(^\d{5}-\d{4}$)/.test(postalCode);
/**
* PHONE
*/
/* const validatePhoneNumber = phoneNumber =>
/((^0[1-9]|\+[0-9]{3})([-. ]?[0-9]{2}){4}$)/.test(phoneNumber);
const correctPhoneNumber = phone =>
phone[0] === '0' ? '+33' + phone.substr(1) : phone;
const Checkjson = (appProfil, referential, data) => {
// @TODO get a referentiel per object then check data validity and allowed access
// need to add referentiel manager
const invalidefor = [];
let updateDatabase = false;
Object.keys(data).forEach(field => {
switch (field) {
case 'token':
updateDatabase = true;
break;
case 'email':
if (!validateEmail(data.email)) {
invalidefor.push('ERREMAIL:' + field);
} else {
updateDatabase = true;
}
break;
case 'password':
if (!validatePassword(data.password)) {
invalidefor.push('ERRPWD:' + field);
} else {
data.password = bcrypt.hash(data.password, config.saltRounds);
updateDatabase = true;
}
break;
}
});
return { invalidefor, data, updateDatabase };
};
*/
//Wait for the given number of milliseconds
// use inside an async function:
//   await sleep(2000)
utils.sleep = (ms) => {
return new Promise((resolve) => setTimeout(resolve, ms));
};
utils.normalize = {};
utils.normalize.telephonefr = (phone) => {
phone = phone.trim().replace(/[- .]/g, "");
if (
Checkjson.schema.properties.format.telephonefr(phone) &&
phone.length == 10 &&
phone[0] == "0"
) {
phone = "+33 " + phone.substring(1);
}
return phone;
};
utils.normalize.zfill10 = (num) => {
let s = num + "";
while (s.length < 10) s = "0" + s;
return s;
};
utils.generemdp = (nbpos, fromchar) => {
if (!fromchar) {
fromchar = "ABCDEFGHIJKLMNPQRSTUVWZY123456789";
}
let mdp = "";
for (let i = 0; i < nbpos; i++) {
const pos = Math.floor(Math.random() * fromchar.length);
mdp += fromchar.substring(pos, pos + 1);
}
return mdp;
};
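// Example: an 8-char password from the default charset, or a 6-digit pin from a custom one:
// const pwd = utils.generemdp(8);
// const pin = utils.generemdp(6, "0123456789");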
utils.generecompteur = (filecpt, typeincrement) => {
let file = `${filecpt}/${typeincrement}.json`;
let prefix = "";
if (typeincrement === "ANNEESEMAINE") {
file = `${filecpt}/${typeincrement}${moment().format(
"YYYY"
)}${moment().format("WW")}.json`;
prefix = `${moment().format("YYYY")}${moment().format("WW")}`;
}
let num = 1;
try {
num = parseInt(fs.readFileSync(file, "utf8")) + 1;
} catch (err) {
console.log("New incremental counter ", file);
}
fs.writeFileSync(file, String(num), "utf8");
return prefix + num;
};
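// Example (hypothetical folder): a year+week prefixed counter stored under ../tmp/cpt,
// e.g. "202349" + 1 => "2023491", then "2023492" within the same ISO week:
// const ref = utils.generecompteur("../tmp/cpt", "ANNEESEMAINE");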
/**
* CSV
*/
utils.json2csv = (jsondata, options, callback) => {
// only handles json = [{field:value, field:[list of values]}]
if (jsondata.length == 0) {
return callback("Empty json", null);
}
if (!options.retln) options.retln = "\n";
if (!options.sep) options.sep = ";";
if (!options.arraysplitsep) options.arraysplitsep = ",";
if (!options.replacespecialcarJson2Csv) {
options.replacespecialcarJson2Csv = [];
} else {
if (typeof options.replacespecialcarJson2Csv == "string") {
//allows passing regexes as a string
options.replacespecialcarJson2Csv = eval(
options.replacespecialcarJson2Csv
);
}
}
let etat = "";
let csv = "";
let entete = "";
let prem = true;
for (const j in jsondata) {
// console.log(jsondata[j])
for (const c in options.champs) {
if (prem) {
entete += options.champs[c] + options.sep;
}
if (jsondata[j][options.champs[c]]) {
if (options.array && options.array.indexOf(options.champs[c]) > -1) {
csv +=
jsondata[j][options.champs[c]].join(options.arraysplitsep) +
options.sep;
} else {
let currentValue = "";
if (jsondata[j][options.champs[c]])
currentValue += jsondata[j][options.champs[c]];
options.replacespecialcarJson2Csv.forEach((re) => {
//console.log(currentValue)
currentValue = currentValue.replace(re[1], re[0]);
});
csv += currentValue + options.sep;
}
} else {
csv += options.sep;
}
}
csv = csv.substring(0, csv.length - 1) + options.retln;
if (prem) {
prem = false;
entete = entete.substring(0, entete.length - 1) + options.retln;
// console.log(entete)
}
}
// return entete + csv;
if (etat == "") {
return callback(null, entete + csv);
} else {
return callback(etat, null);
}
};
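// Usage sketch mirroring csv2json's options (field names are illustrative):
// utils.json2csv(
//   [{ NOM: "Shop", TAG: ["food", "bio"] }],
//   { champs: ["NOM", "TAG"], array: ["TAG"], sep: ";", retln: "\n" },
//   (err, csv) => { if (!err) fs.writeFileSync("out.csv", csv, "utf8"); }
// );
// // -> "NOM;TAG\nShop;food,bio\n"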
/**
 * Get headers from first line of CSV
 * @param {array} lines array of strings, one per csv line
 * @param {string} sep cell separator
 * @return {array} string array of headers
 */
utils.getHeaders = (lines, sep) =>
lines[0].split(sep).map((i) => i.replace(/"/g, ""));
/**
 * [csv2json description]
 * @param {object} csv object of csv file that has been read
 * @param {object} options object containing csv options, headers, ...
 {retln:'line return code \n or \n\r',
 sep:'code to split cells',
 champs:[ch1,ch2,...] catch only those fields,
 array:[ch1, ] a field listed in champs may appear several times; its values are then pushed into an array }
 * @param {Function} callback callback function
 * @return {callback} - return an error if error, else return json
 it converts a csv file into a json = [{field:value}]
 Usage example:
 fiche.csv2article = (err, fiche) => {
 if (!err) {
 console.log(fiche)
 }
 }
 utils.csv2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
 retln: "\n",
 sep: ";",
 champs: ["NOM", "OBJET", "ADRESSE_PRO", "CP_PRO", "VILLE_PRO", "ZONE", "PHONE_PRO", "HORAIRESDESC", "HORAIREDATA", "URL", "FACEBOOK", "INSTA", "EMAIL_PRO", "IMG", "TAG"],
 array: ["TAG", "PHONE_PRO", "EMAIL_PRO"]
 }, fiche.csv2article)
 */
utils.replacecarbtweendblquote = (csv, car, carremplacant) => {
/*
return csv text where any `car` between two double quotes is replaced by carremplacant
*/
let newcsv = "";
let txtencours = "";
let flagouvert = false;
const sepreg = new RegExp(`${car}`, "gmi");
for (let j = 0; j < csv.length; j++) {
if (csv[j] == '"') {
if (flagouvert) {
// we are trying to close a text string
if (csv[j + 1] == '"') {
// two consecutive "" are kept as "" and we skip the next char
txtencours += '""';
j++;
} else {
// this is a real closing quote
flagouvert = false;
newcsv += txtencours.replace(sepreg, carremplacant);
txtencours = '"';
}
} else {
// opening a string
flagouvert = true;
// flush the previous content into newcsv
newcsv += txtencours;
txtencours = '"';
}
} else if (csv[j] !== "\n") {
txtencours += csv[j];
} else if (csv[j] == "\n") {
if (!flagouvert) txtencours += "\n";
}
}
return newcsv + txtencours;
};
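// Example: mask separators that sit inside quoted cells before splitting,
// exactly what csv2json does below with CARSEPARATOR:
// utils.replacecarbtweendblquote('a;"x;y";b', ";", "CARSEPARATOR")
// // -> 'a;"xCARSEPARATORy";b'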
utils.analysestring = (string) => {
let buftxt = "";
let bufcode = "";
let i = 0;
let avecRL = false;
for (let p = 0; p < string.length; p++) {
if (string[p].charCodeAt() == 10) {
buftxt += "[RL]";
avecRL = true;
} else {
buftxt += string[p];
}
bufcode += "-" + string[p].charCodeAt();
if (i == 20) {
if (avecRL) {
console.log(`${buftxt} - ${bufcode}`);
} else {
console.log(`${buftxt} ---- ${bufcode}`);
}
i = 0;
buftxt = "";
bufcode = "";
avecRL = false;
}
i++;
}
};
const txtstring = `32932,BK_F2F_B_COM_10x1H-09,"My Communication Workshop ""Session N°9 - 1H""","<p>&nbsp;</p>
<table>
<tbody>
<tr>
<td>
<p>Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking</p>
</td>
</tr>
</tbody>
</table>",,english,2,0,,2,0,classroom,"0000-00-00 00:00:00","0000-00-00 00:00:00",0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32932,1101,,"BUSINESS KEYS",0,
32933,BK_F2F_B_COM_10x1H-10,"My Communication Workshop Session N°10 - 1H","<p>&nbsp;</p>
<table>
<tbody>
<tr>
<td>
<p>Learner who needs to develop their ability to communicate effectively at work, both in writing and speaking</p>
</td>
</tr>
</tbody>
</table>",,english,2,0,,2,0,classroom,"0000-00-00 00:00:00","0000-00-00 00:00:00",0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32933,1101,,"BUSINESS KEYS",0,
32934,BK_F2F_B_JOB_10x1H-01,"My Job Search Workshop Session N°1 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32934,1108,,,0,
32935,BK_F2F_B_JOB_10x1H-02,"My Job Search Workshop Session N°2 - 1H","<p>PACK JOB SEARCH</p>",,english,2,0,,2,0,classroom,,,0000-00-00,0000-00-00,https://www.yesnyoulearning.com/lms/index.php?r=player&course_id=32935,1108,,,0,`;
//utils.analysestring(txtstring)
//console.log(utils.replacecarbtweendblquote(txtstring, ",", 'CARSEPARATOR')
// .split("\n")[0].split(","))
utils.csv2json = (csv, options, callback) => {
// IF YOU HAVE TROUBLE WITH A STUBBORN EXCEL FILE
// open it in Linux Calc and save as csv utf8 with ; separator and " quote, keeping cell content as displayed
console.log("\n--------------- CSV2JSON ---------------\n");
// Default CSV options
if (!options.retln) options.retln = "\n";
if (csv.indexOf("\n\r") > -1) options.retln = "\n\r";
if (!options.sep) options.sep = ";";
//handle a separator inside a text string
//const regseptext = new RegExp(`${options.sep}(?!(?:[^"]*"[^"]*")*[^"]*$)`, 'gm');
//csv = csv.replace(regseptext, "CARACSEPAR");
// csv = utils.replacecarbtweendblquote(csv, options.retln, "RETLIGNE")
csv = utils.replacecarbtweendblquote(csv, options.sep, "CARSEPARATOR");
if (!options.replacespecialcarCsv2Json) {
options.replacespecialcarCsv2Json = [];
} else {
if (typeof options.replacespecialcarCsv2Json == "string") {
//allows passing regexes as a string
options.replacespecialcarCsv2Json = eval(
options.replacespecialcarCsv2Json
);
}
}
const result = [];
const lines = csv.split(options.retln);
const headers = utils.getHeaders(lines, options.sep);
let unknownHeaders = "";
//console.log('headers', headers)
//console.log('options.champs', options.champs)
headers.forEach((header) => {
// If a header is not in the predefined champs list
// add it to the unknown headers
if (options.champs.indexOf(header) === -1) {
unknownHeaders += `${header}, `;
}
});
if (unknownHeaders !== "") {
const errorMsg = `CSV2JSON() - Unknown fields: ${unknownHeaders}`;
return callback(errorMsg, null);
}
}
lines.forEach((line, index) => {
// Skip headers line or empty lines
if (index === 0 || line.replace(/\s/g, "").length === 0) {
return;
}
// for debugging, origincsv keeps the original line
const currentLineData = { origincsv: line, linenumber: index };
const currentLine = line.split(options.sep); // Current string in the line
for (let j = 0; j < headers.length; j++) {
// If the cell is not empty
if (currentLine[j]) {
// Clean the field
// and optionally fix reserved characters (; in labels, etc.)
let currentValue = currentLine[j].trim();
// restore the separator that was masked between double quotes
currentValue = currentValue.replace("CARSEPARATOR", options.sep);
options.replacespecialcarCsv2Json.forEach((re) => {
currentValue = currentValue.replace(re[0], re[1]);
});
// If the header is an email
if (headers[j].includes("EMAIL")) {
// remove all spaces
currentValue = currentLine[j].replace(/\s/g, "");
}
// check whether the field must be numeric
if (options.numericfield && options.numericfield.includes(headers[j])) {
currentValue = currentLine[j].replace(/\,/g, ".");
if (isNaN(parseFloat(currentValue))) {
return callback(
`${headers[j]} contains the value -${currentValue}- and should be numeric`,
null
);
}
}
if (currentValue) {
// If the current header is of type array
// it means the header appears several times in the CSV
// and the values matching this header
// must be pushed into an array
if (options.array && options.array.indexOf(headers[j]) > -1) {
// If the array for this header does not exist yet, create it
if (!currentLineData[headers[j]]) {
currentLineData[headers[j]] = [];
}
if (options.arraysplitsep) {
currentValue.split(options.arraysplitsep).forEach((v) => {
currentLineData[headers[j]].push(v);
});
} else {
currentLineData[headers[j]].push(currentValue);
}
} else {
// If a header is already present for this line
// while it is not declared as an array
// return an error
if (currentLineData[headers[j]]) {
const errorMsg = `The field ${headers[j]} appears several times while it is not declared as an array!`;
return callback(errorMsg, null);
}
currentLineData[headers[j]] = currentValue;
}
}
}
}
result.push(currentLineData);
});
return callback(null, result);
};
/**
 * [csvparam2json description]
 * @param {object} csv object of csv file that has been read
 * @param {object} options object containing csv options, headers, ...
 {retln:'line return code \n or \n\r',
 sep:'code to split cells',
 champs:[ch1,ch2,...] catch only those fields,
 array:[ch1, ] a field may appear several times; its values are then pushed into an array }
 * @param {Function} callback callback function
 * @return {callback} - return an error if error, else return json
 it converts a csv with 3 columns col1;col2;col3 into a json tree
 if col1 contains __ then it splits into a leaf
 col1 = xxxx__yyyy ; col2 = value ; col3 = comment that is ignored
 return data = {xxxx:{yyyy:value}}
 col1 = xxxx; col2 = value; col3 = comment ignored
 return data = {xxxx:value}
 Usage example:
 fiche.csvparam2article = (err, fiche) => {
 if (!err) {
 console.log(fiche)
 }
 }
 utils.csvparam2json(fs.readFileSync('./devdata/tribee/aubergenville/infoexterne/localbusiness.csv', 'utf-8'), {
 retln: "\n",
 sep: ";",
 champs: ["NOM", "OBJET", "ADRESSE_PRO", "CP_PRO", "VILLE_PRO", "ZONE", "PHONE_PRO", "HORAIRESDESC", "HORAIREDATA", "URL", "FACEBOOK", "INSTA", "EMAIL_PRO", "IMG", "TAG"],
 array: ["TAG", "PHONE_PRO", "EMAIL_PRO"]
 }, fiche.csvparam2article)
 */
utils.csvparam2json = (csv, options, callback) => {
console.log("\n--------------- CSVPARAM2JSON ---------------\n");
let etat = "";
let result;
const param = {};
if (!options.retln) {
options.retln = "\n";
}
if (csv.indexOf("\n\r") > -1) {
options.retln = "\n\r";
}
if (!options.sep) {
options.sep = ";";
}
if (!options.seplevel) {
options.seplevel = "__";
}
if (!options.replacespecialcarCsv2Json) {
options.replacespecialcarCsv2Json = [];
} else {
if (typeof options.replacespecialcarCsv2Json == "string") {
//allows passing regexes as a string
options.replacespecialcarCsv2Json = eval(
options.replacespecialcarCsv2Json
);
}
}
const lines = csv.split(options.retln);
for (let i = 0; i < lines.length; i++) {
const infol = lines[i].split(options.sep);
if (infol[0].length > 4 && infol.length < 2) {
// if the 1st element has more than 4 characters and the line has fewer than 2 columns, something is wrong
etat += `Error on ${lines[i]}: fewer than 2 columns separated by ${options.sep}`;
continue;
}
// handle here all the special characters
// reserved for csv: ; ' etc.
if (infol[1] && infol[1] + "" == infol[1]) {
options.replacespecialcarCsv2Json.forEach((re) => {
infol[1] = infol[1].replace(re[0], re[1]);
});
infol[1] = infol[1].replace(/'/g, '"');
if (infol[1].toLowerCase() === "true") {
infol[1] = true;
} else if (infol[1].toLowerCase() === "false") {
infol[1] = false;
}
}
// skip empty lines
if (infol[0] == "") continue;
if (infol[0].indexOf(options.seplevel) == -1) {
param[infol[0]] = infol[1];
continue;
} else {
const arbre = infol[0].split(options.seplevel);
switch (arbre.length) {
case 1:
param[arbre[0]] = infol[1];
break;
case 2:
if (arbre[1] != "ARRAY") {
if (!param[arbre[0]]) param[arbre[0]] = {};
param[arbre[0]][arbre[1]] = infol[1];
} else {
if (!param[arbre[0]]) param[arbre[0]] = [];
eval("result=" + infol[1]);
param[arbre[0]].push(result);
}
break;
case 3:
if (arbre[2] != "ARRAY") {
if (!param[arbre[0]]) param[arbre[0]] = {};
if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
param[arbre[0]][arbre[1]][arbre[2]] = infol[1];
} else {
if (!param[arbre[0]]) param[arbre[0]] = {};
if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = [];
eval("result=" + infol[1]);
param[arbre[0]][arbre[1]].push(result);
}
break;
case 4:
if (arbre[3] != "ARRAY") {
if (!param[arbre[0]]) param[arbre[0]] = {};
if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
if (!param[arbre[0]][arbre[1]][arbre[2]])
param[arbre[0]][arbre[1]][arbre[2]] = {};
param[arbre[0]][arbre[1]][arbre[2]][arbre[3]] = infol[1];
} else {
if (!param[arbre[0]]) param[arbre[0]] = {};
if (!param[arbre[0]][arbre[1]]) param[arbre[0]][arbre[1]] = {};
if (!param[arbre[0]][arbre[1]][arbre[2]])
param[arbre[0]][arbre[1]][arbre[2]] = [];
eval("result=" + infol[1]);
param[arbre[0]][arbre[1]][arbre[2]].push(result);
}
break;
default:
break;
}
}
}
if (etat == "") {
return callback(null, JSON.parse(JSON.stringify(param)));
} else {
return callback(etat, null);
}
};
utils.levenshtein = (a, b) => {
if (a.length === 0) return b.length;
if (b.length === 0) return a.length;
let tmp, i, j, prev, val, row;
// swap to save some memory O(min(a,b)) instead of O(a)
if (a.length > b.length) {
tmp = a;
a = b;
b = tmp;
}
row = Array(a.length + 1);
// init the row
for (i = 0; i <= a.length; i++) {
row[i] = i;
}
// fill in the rest
for (i = 1; i <= b.length; i++) {
prev = i;
for (j = 1; j <= a.length; j++) {
if (b[i - 1] === a[j - 1]) {
val = row[j - 1]; // match
} else {
val = Math.min(
row[j - 1] + 1, // substitution
Math.min(
prev + 1, // insertion
row[j] + 1
)
); // deletion
}
row[j - 1] = prev;
prev = val;
}
row[a.length] = prev;
}
return row[a.length];
};
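// Example: edit distance, e.g. for fuzzy alias suggestions:
// utils.levenshtein("smatchit", "smatchir") // -> 1 (one substitution)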
utils.testinarray = (array, arrayreferent) => {
// true if at least one element of array exists in arrayreferent
let exist = false;
if (arrayreferent) {
array.forEach((e) => {
if (arrayreferent.includes(e)) exist = true;
});
}
return exist;
};
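// Example: does the user hold at least one of the required profils?
// utils.testinarray(["pagan", "mayor"], ["mayor", "druid"]) // -> true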
/*
DIRECTORY
*/
const isDirectory = (source) => fs.lstatSync(source).isDirectory();
const getDirectories = (source) =>
fs
.readdirSync(source)
.map((name) => path.join(source, name))
.filter(isDirectory);
module.exports = utils;

208
models/Towns.js Normal file
View File

@@ -0,0 +1,208 @@
const bcrypt = require("bcrypt");
const fs = require("fs-extra");
const glob = require("glob");
const moment = require("moment");
const jwt = require("jwt-simple");
const UUID = require("uuid");
const mustache = require("mustache"); // used below to render the nginx templates
const conf = require(`../../conf/townconf.json`);
const Checkjson = require(`./Checkjson.js`);
const Odmdb = require("./Odmdb.js");
const Towns = {};
Towns.create = () => {
// Create a new town from conf (generated in apxtrie.js if the town does not already exist on the server)
console.log(
`RUNNING A NEW SETUP with nation ${conf.nationId} and town ${conf.townId} to be accessible in dns http://${conf.dns}`
);
const initconf = fs.readJSONSync(
`${conf.dirapi}/adminapi/www/adminapx/initconf.json`
);
// Synchronize nationchains/
const { updateobjectsfromfreshesttown } = require("./api/models/Nations.js");
updateobjectsfromfreshesttown(initconf.towns, {
pagans: "alias_all.json",
towns: "townId_all.json",
nations: "nationId_all.json",
});
initconf.dirapi = conf.dirapi;
initconf.dirtown = conf.dirtown;
initconf.nationId = conf.nationId;
initconf.townId = conf.townId;
initconf.sudoerUser = process.env.USER;
if (!initconf.dns.includes(conf.dns)) {
initconf.dns.push(conf.dns);
}
initconf.nginx.include.push(`${initconf.dirapi}/adminapi/www/nginx_*.conf`);
initconf.nginx.include.push(`${initconf.dirtown}/tribes/**/www/nginx_*.conf`);
initconf.nginx.logs = `${initconf.dirtown}/logs/nginx/adminapx`;
fs.ensureDirSync(`${initconf.dirtown}/logs/nginx`);
fs.ensureDirSync(`${initconf.dirtown}/tmp/tokens`);
initconf.nginx.website = "adminapx";
initconf.nginx.fswww = `${initconf.dirapi}/adminapi/www`;
initconf.nginx.pageindex = "index_en.html";
const { exec } = require("child_process");
exec(
`sudo chown -R ${process.env.USER}:${process.env.USER} /etc/nginx`,
(error, stdout, stderr) => {
if (error) {
console.log("\x1b[42m", error, stdout, stderr, "\x1b[0m");
console.log(
`impossible to change owner of /etc/nginx by ${initconf.sudoerUser}:${initconf.sudoerUser}`
);
fs.removeSync(initconf.dirtown);
process.exit();
} else {
console.log(
`successfull sudo chown -R ${process.env.USER}:${process.env.USER} /etc/nginx`
);
}
}
);
// create town env
fs.outputJsonSync(`${initconf.dirtown}/conf.json`, initconf, { space: 2 });
const nginxconf = fs.readFileSync(
"./adminapi/www/adminapx/conf/nginx.conf.mustache",
"utf8"
);
const proxyparams = fs.readFileSync(
"./adminapi/www/adminapx/conf/nginxproxyparams.mustache",
"utf8"
);
const websiteconf = fs.readFileSync(
"./adminapi/www/adminapx/conf/nginxmodelwebsite.conf.mustache",
"utf8"
);
// saved and change nginx conf
if (!fs.existsSync("/etc/nginx/nginxconf.saved")) {
fs.moveSync("/etc/nginx/nginx.conf", "/etc/nginx/nginxconf.saved");
console.log(
"your previous /etc/nginx/nginx.conf was backup in /etc/nginx/nginxconf.saved"
);
}
fs.outputFileSync(
"/etc/nginx/nginx.conf",
mustache.render(nginxconf, initconf),
"utf8"
);
fs.outputFileSync(
"/etc/nginx/proxy_params",
mustache.render(proxyparams, initconf),
"utf8"
);
fs.outputFileSync(
`${initconf.dirapi}/adminapi/www/nginx_adminapx.conf`,
mustache.render(websiteconf, initconf),
"utf8"
);
exec(initconf.nginx.restart, (error, stdout, stderr) => {
if (error) {
console.log("\x1b[42m", error, stdout, stderr, "\x1b[0m");
//@todo remove the latest nginx config and restart
fs.moveSync("/etc/nginx/nginxconf.saved", "/etc/nginx/nginx.conf");
console.log("Restart yarn dev with correct parameters");
// cleanup
fs.removeSync(initconf.dirtown);
} else {
//@TODO to finalize; to be tested on a machine for the creation of new towns
// add town in nationchains
const gettown = Odmdb.get(`${initconf.dirapi}/nationchains`, "towns", [
initconf.townId,
]);
if (gettown.data[initconf.townId] == "notfound") {
Odmdb.create(
`${initconf.dirapi}/nationchains`,
"towns",
{
townId: initconf.townId,
nationId: initconf.nationId,
dns: initconf.dns,
IP: "127.0.0.1",
status: "unchain",
tribes: [],
},
false
);
} else if (gettown.data[initconf.townId].dns !== initconf.dns) {
// reinstalling a town on another server: update the dns; the IP, the status and the tribes are updated through the interface
const updtown = Odmdb.update(
`${initconf.dirapi}/nationchains`,
"towns",
{ dns: initconf.dns },
initconf.townId
);
}
console.log(`ready to use http://${initconf.dns}`);
}
});
};
Towns.changeowner = async (newowner, requestby) => {
/**
 * Change the mayor (owner) of this town when requestby is the current mayor or no mayor is set yet
 */
if (!fs.existsSync(`./nationchains/pagans/itm/${newowner}.json`)) {
return {
status: 404,
ref: "towns",
msg: "newownerdoesnotexist",
data: { alias: newowner },
};
}
if (!conf.mayorId || conf.mayorId == requestby) {
// update object town + town/conf.json + setup_xx.json
const gettown = Odmdb.get(`../nationchains`, "towns", [
conf.townId,
]);
console.log(`before town: ${conf.townId}`, gettown);
if (gettown.data[conf.townId] == "notfound") {
return {
status: 404,
ref: "towns",
msg: "townIdnotfound",
data: { townId: conf.townId },
};
}
gettown.data[conf.townId].mayorId = newowner;
const objup = await Odmdb.update(
`../nationchains`,
"towns",
gettown.data[conf.townId],
conf.townId
);
//update the itm town
if (objup.status != 200) {
return objup;
}
console.log(`after town update: ${conf.townId}`, gettown);
conf.mayorId = newowner;
fs.outputJsonSync(`../conf.json`, conf);
const setup = fs.readJSONSync(
`${conf.dirapi}/adminapi/www/adminapx/conf/setup_xx.json`
);
setup.mayorId = newowner;
//update the setup file for webapp adminapi
fs.outputJsonSync(
`${conf.dirapi}/adminapi/www/adminapx/conf/setup_xx.json`,
setup
);
return {
status: 200,
ref: "towns",
msg: "newownerchangesuccess",
data: { alias: newowner },
};
}
return {
status: 403,
ref: "towns",
msg: "notallow",
data: { newowner, currentowner: conf.mayorId },
};
};
module.exports = Towns;

29
models/Trackings.js Normal file
View File

@@ -0,0 +1,29 @@
const glob = require("glob");
const path = require("path");
const fs = require("fs-extra");
const dayjs = require("dayjs");
const axios = require("axios");
const conf = require(`../../conf/townconf.json`);
const Checkjson = require(`./Checkjson.js`);
const Trackings = {}
/**
* Tracking system management
*
* Data collection is done from nginx log system see routes/trackings.js for doc
*/
/**
* Process plan to run each night or on demand to collect log data and cleanup
*/
Trackings.logcollection=()=>{
}
Trackings.dashboard=(graphname)=>{
console.log('Process data to provide a specific graph')
}
module.exports = Trackings;

377
models/Tribes.js Executable file
View File

@@ -0,0 +1,377 @@
const bcrypt = require( 'bcrypt' );
const fs = require( 'fs-extra' );
const path = require( 'path' );
const glob = require( 'glob' );
const Mustache = require( 'mustache' );
const execSync = require( 'child_process' )
.execSync;
const dnsSync = require( 'dns-sync' );
const jwt = require( 'jwt-simple' );
const moment = require( 'moment' );
const UUID = require( 'uuid' );
const Pagans = require( './Pagans.js' );
const conf = require(`../../conf/townconf.json`);
const Checkjson = require( `./Checkjson.js`);
/*
tribeid manager
@TODO @STUDY
To add a tribe in dirtown/tribes with a mayor phil
see man adduser and the skeleton directory reference to set an env for apxtri in /home/tribename/
accessible by tribename/password
then add the group to phil to allow phil to create a symlink /dirtown/tribes/tribename => /home/tribename
At each reboot run a process to analyse /api/routes and /api/models where only safe js can be executed (only write data into /home/tribename, never outside)
1- Create a user in linux with $ sudo useradd smatchit
2 => this creates a user:group and a folder smatchit in /home/phil/dirtown/tribes/
2 => add group smatchit to phil to allow phil to access files with group access rights
3 set a password if needed "$sudo passwd smatchit" (sm@tchit) to make smatchit available from ssh on port 22
4 to delete a user: sudo userdel smatchit (this keeps the folder smatchit; to remove the folder use sudo userdel --remove smatchit)
/tribes/tribeid
Manage a tribeid space
* create
* update by managing options and contract
* delete a tribeid
* check accountability
*/
const Tribes = {};
Tribes.init = () => {
console.group( 'init Tribes' );
let tribeids = [];
let routes = glob.sync( './routes/*.js' )
.map( f => {
return { url: `/${path.basename(f,'.js')}`, route: f }
} );
let DOMs = [];
let appname = {};
const TribesGlobalConfig = glob.sync( `${conf.tribes}/**/clientconf.json` )
.map( f => {
const conf = fs.readJSONSync( f );
// check if plugins exist and add it in .plugins of each tribeid conf
conf.plugins = glob.sync( `${conf.tribes}/${conf.tribeid}/plugins/**/package.json` )
.map( p => {
const pack = fs.readJsonSync( p, 'utf8' );
routes.push( { url: `/${pack.name}`, route: `${conf.tribes}/${conf.tribeid}/plugins/${pack.name}/route.js` } );
return pack;
} );
//Add here any other info to get a global view and init
//...
tribeids.push( conf.tribeid );
DOMs = [ ...new Set( [ ...DOMs, ...conf.allowedDOMs ] ) ];
if( conf.website ) appname[ conf.tribeid ] = Object.keys( conf.website )
return conf;
} );
// store global conf for sharing to other api
fs.outputJsonSync( `${conf.tmp}/clientconfglob.json`, TribesGlobalConfig, {
spaces: 2
} );
return { tribeids, routes, DOMs, appname }
}
Tribes.create = ( data ) => {
/* data = clientconf.json
{
"tribeid": "apxtri",
"genericpsw": "Trze3aze!",
"website": {
"presentation":"https://www.apxtri.org",
"webapp": "https://webapp.apxtri.org"
},
"allowedDOMs": ["local.fr", "localhost:9002", "ndda.fr", "apxtri.org"],
"clientname": "apxtri",
"clientlogo": "",
"geoloc": [],
"useradmin": {PUBKEY:"",EMAIL:"",LOGIN:"adminapxtri",UUID:"adminapxtri"},
"smtp": {
"emailFrom": "support@apxtri.org",
"emailcc": [],
"service": "gmail",
"auth": {
"user": "antonin.ha@gmail.com",
"pass": "Ha06110"
}
},
"accepted-language": "fr,en",
"langueReferential": ["fr"]
}
What about:
"tribeid": same as the folder where all the client's files are stored
"genericpsw": a generic password for new users; needs upper and lower case, a number and a special char
"dnsname": a domain name belonging to the client
"subdns": "www", a sub domain; subdns.dnsname gives a public web access to
"website": { keywebsite:url}, gives access to conf.tribes/tribeid/www/keywebsite/index.html,
"allowedDOMs": ["local.fr", "localhost:9002", "nnda.fr"], //for CORS, @TODO generate from previous URL; this allows this apxtri instance to be accessible
"clientname": Name of the organisation if any,
"clientlogo": logo of the organisation if any,
"geoloc": [], if any
"useradmin": { this is the 1st user created automatically to make the gui available for the 1st user
"PUBKEY":public key to be authenticated without an email,
"EMAIL":user email; we need at least one authentication method set up, after that the user can use both or only one
"LOGIN": login to use for accessing admintribeid,
"UUID": unique id, normally a UUID, but the uuid admintribeid is the same person in any apxtri instance so we use it by convention.
"xlang": lang used by this user
},
"smtp": { smtp used to send email by the nodemailer lib, basic example with a google account
"emailFrom": "support@xx.fr",
"emailcc": [],
"service": "gmail",
"auth": {
"user": "antonin.ha@gmail.com",
"pass": "Ha06110"
}
},
"accepted-language": "fr,en", list of accepted languages in terms of http requests.
"langueReferential": ["fr"], list of the languages the referential texts have to be translated into
}
*/
//update tmp/confglog.json
const dataclient = Tribes.init();
//return in prod all instances apxinfo={tribeids:[],logins:[]}
// in dev return only local
//check tribeid name is unique
console.log( 'tribeid list', dataclient.tribeids )
if( dataclient.tribeids.includes( data.tribeid ) ) {
return { status: 403, payload: { model: "client", info: [ 'tribeidalreadyexist' ] } }
}
//loginsglob = {login:tribeid}
let loginsglob = {};
if( fs.existsSync( `${conf.tmp}/loginsglob.json`, 'utf-8' ) ) {
loginsglob = fs.readJsonSync( `${conf.tmp}/loginsglob.json`, 'utf-8' );
}
const logins = Object.keys( loginsglob );
if( logins.includes( data.useradmin.login ) ) {
return { status: 403, payload: { model: "client", info: [ 'loginalreadyexist' ] } }
}
fs.ensureDirSync( `${conf.tribes}/${data.tribeid}` );
[ 'users', 'www', 'referentials', 'nationchains' ].forEach( r => {
fs.copySync( `${conf.dirapi}/setup/tribes/apxtri/${r}`, `${conf.tribes}/${data.tribeid}/${r}` );
} )
fs.outputJsonSync( `${conf.tribes}/${data.tribeid}/clientconf.json`, data );
const confcli = JSON.parse( Mustache.render( fs.readFileSync( `${conf.dirapi}/setup/tribes/apxtri/clientconf.mustache`, 'utf8' ), data ) );
fs.outputJsonSync( `${conf.tribes}/${data.tribeid}/clientconf.json`, confcli );
return Pagans.createUser( {
xpaganid: "setup",
xworkon: data.tribeid,
xlang: data.useradmin.xlang
}, data.useradmin );
};
Tribes.archive = ( tribeid ) => {
// TODO: zip the tribeid directory
// remove tribeid from data or devdata
try {
fs.moveSync( `${conf.tribes}/${tribeid}`, `${conf.archivefolder}/${tribeid}` );
//update apxtrienv
Tribes.init();
return { status: 200, payload: { info: [ 'deletetribeidsuccessfull' ], models: 'Tribes', moreinfo: "TODO see in Tribes.archive" } }
} catch ( err ) {
console.log( "Archiving error", err )
return { status: 403, payload: { info: [ 'archiveerror' ], models: 'Tribes', moreinfo: err } }
}
}
////////////// Manage file for Tribes
Tribes.checkaccessfolder = ( folder, typeaccessrequested, useraccessrights, useruuid ) => {
// check folder right
}
Tribes.checkaccessfiles = ( listfile, typeaccessrequested, useraccessrights, useruuid ) => {
// @listfile to check accessright on file or folder
// @typeaccessrequested on files: R read or download, U for update, D for delete, O for owner; an owner has all rights RUD on their files
// @useraccessrights from the user's account /userd/uuid.json
// @useruuid public uuid user
// return {'ok':[files authorized],'ko':[files not authorized]}
const checkauthlistfile = { 'ok': [], 'ko': [] }
let structf = []
let inforep = { file: {}, dir: {} }
let done;
for( const f of listfile ) {
done = false;
if( !fs.existsSync( `${conf.tribes}/${f}` ) ) {
done = true;
checkauthlistfile.ko.push( f )
console.log( `${f} file does not exist` )
} else {
structf = f.split( '/' );
}
//check that tribeid exists / tribeid/object/
if( !done &&
useraccessrights.data[ structf[ 0 ] ] &&
useraccessrights.data[ structf[ 0 ] ][ structf[ 1 ] ] &&
useraccessrights.data[ structf[ 0 ] ][ structf[ 1 ] ].includes( typeaccessrequested ) ) {
done = true;
checkauthlistfile.ok.push( f );
} else {
// check if the folder has a .info.json with .file[f].shared{useruuid:'CRUDO'}
console.log( 'structf', structf )
if( fs.existsSync( `${conf.tribes}/${structf.slice(0,-1).join('/')}/.info.json` ) ) {
inforep = fs.readJsonSync( `${conf.tribes}/${structf.slice(0,-1).join('/')}/.info.json`, 'utf8' )
}
console.log( `no accessrights for ${f} for ${useruuid} ` )
}
if( !done && inforep.file[ f ] && inforep.file[ f ].shared && inforep.file[ f ].shared[ useruuid ] && inforep.file[ f ].shared[ useruuid ].includes( typeaccessrequested ) ) {
done = true;
checkauthlistfile.ok.push( f )
}
// If no authorization then ko
if( !done ) {
checkauthlistfile.ko.push( f )
}
} // end loop for
//console.log( 'checkauthlistfile', checkauthlistfile )
return checkauthlistfile;
}
Tribes.dirls = ( tribeid, dir ) => {
/*
Return list of file into tribeid/dir
*/
let comment = { src: `${tribeid}/${dir}`, file: {}, dir: {} };
if( fs.existsSync( `${conf.tribes}/${tribeid}/${dir}/.info.json` ) ) {
comment = fs.readJsonSync( `${conf.tribes}/${tribeid}/${dir}/.info.json`, 'utf-8' );
}
const listfile = []
const listdir = []
glob.sync( `${conf.tribes}/${tribeid}/${dir}/*` )
.forEach( f => {
//console.log( f )
const stats = fs.statSync( f );
// console.log( stats )
if( stats.isFile() ) {
listfile.push( path.basename( f ) )
if( !comment.file[ path.basename( f ) ] ) {
comment.file[ path.basename( f ) ] = { tags: [], info: "", thumbb64: "" };
}
comment.file[ path.basename( f ) ].mtime = stats.mtime;
comment.file[ path.basename( f ) ].ctime = stats.ctime;
comment.file[ path.basename( f ) ].size = stats.size;
}
if( stats.isDirectory() ) {
listdir.push( path.basename( f ) )
if( !comment.dir[ path.basename( f ) ] ) {
comment.dir[ path.basename( f ) ] = { tags: [], info: "", thumbb64: "" }
}
comment.dir[ path.basename( f ) ].nbfile = glob.sync( `${f}/*.*` )
.length;
comment.dir[ path.basename( f ) ].mtime = stats.mtime;
comment.dir[ path.basename( f ) ].ctime = stats.mtime;
console.log( 'comment.dir', comment.dir )
}
} );
// remove files or dirs that were deleted
Object.keys( comment.file )
.forEach( f => {
if( !listfile.includes( f ) ) delete comment.file[ f ]
} )
Object.keys( comment.dir )
.forEach( d => {
if( !listdir.includes( d ) ) delete comment.dir[ d ]
} )
//console.log( comment )
fs.outputJson( `${conf.tribes}/${tribeid}/${dir}/.info.json`, comment, 'utf-8' );
return { status: 200, payload: { info: [ 'succestogetls' ], models: 'Tribes', moreinfo: comment } }
};
Tribes.addspaceweb = ( data ) => {
/*
To create a public spaceweb accessible from https://dnsname/pageindex
input:
{dnsname:["archilinea.fr","www.archilinea.fr"], the 1st is the main dns, the others are just servername redirections
tribeid:"archilinea", from req.session.header.xworkon
website:"presentation",
pageindex:"app_index_fr.html"
mode:dev(local no ssl) | prod(IP + ssl)
}
output:
nginx conf and ssl to serve each https://dnsname to /{tribeid}/www/app/{website}
Careful: this action is executed as root and restarts nginx + apxtri to work
*/
data.configdomain = conf.tribes;
data.porthttp = conf.porthttp;
console.assert( conf.loglevel == "quiet", 'data to create spaceweb:', data );
// create spaceweb app for tribeid/www/app/website/pageindexname.html
if( !fs.existsSync( `${conf.tribes}/${data.tribeid}/www/app/${data.website}` ) ) {
fs.outputFileSync( `${conf.tribes}/${data.tribeid}/www/app/${data.website}/${data.pageindex}`, `<h1>Hello ${data.tribeid} ${data.website} onto ${data.dnsname.join(',')}`, 'utf-8' )
}
//create logs folder
fs.ensureDirSync( `${conf.tribes}/${data.tribeid}/logs/nginx` );
// add nginx http config
const confnginx = fs.readFileSync( 'setup/nginx/modelwebsite.conf.mustache', 'utf-8' );
fs.outputFileSync( `/etc/nginx/conf.d/${data.dnsname[0]}.conf`, Mustache.render( confnginx, data ), 'utf-8' );
if( data.dns == "unchain" ) {
//add in /etc/hosts
let hosts = fs.readFileSync( '/etc/hosts', 'utf8' );
let chg = false;
data.dnsname.forEach( d => {
if( !hosts.includes( `127.0.0.1 ${d}` ) ) {
hosts += `\n127.0.0.1 ${d}`;
chg = true;
}
} );
if( chg ) {
fs.outputFileSync( '/etc/hosts', hosts, 'utf8' )
}
};
//Check that each dns responds
data.dnsname.forEach( d => {
if( !dnsSync.resolve( `${d}` ) ) {
console.log( `resolving ${d} does not return a valid IP, please set up the domain IP redirection before running this script` );
}
} )
//update clientconf.json
const clientconf = fs.readJsonSync( `${conf.tribes}/${data.tribeid}/clientconf.json` );
clientconf.website[ data.website ] = data.dnsname[ 0 ];
//merge allowedDOMs into a unique list
clientconf.allowedDOMs = [ ...new Set( [ ...clientconf.allowedDOMs, ...data.dnsname ] ) ];
fs.outputJsonSync( `${conf.tribes}/${data.tribeid}/clientconf.json`, clientconf, 'utf-8' );
if( !data.setup ) {
// in setup, apxtri is not running and it will be started manually at the 1st run
// in other cases we need to plan a restart for CORS
setTimeout( Tribes.restartapxtri, 300000, data.clienId );
}
const nginxrestart = execSync( `sudo systemctl restart nginx` )
.toString();
console.log( 'Restart nginx', nginxrestart )
if( data.mode != "unchain" ) {
// get ssl certificate. WARNING: you must add -d before each domain that redirects to the web space.
const certbot = execSync( `sudo certbot --nginx -d ${data.dnsname.join(' -d ')}` )
.toString();
console.log( 'certbot is running. TO BE CHECKED FOR A REAL PROD ????', certbot )
}
//sh execution to update change requested
return {
status: 200,
payload: {
model: "Tribes",
info: [ 'webspacecreated' ],
moreinfo: "Space web well created"
}
};
}
Tribes.restartapxtri = ( tribeid ) => {
console.log( 'A restarting was requested 5mn ago from a new spacedev for ' + tribeid )
execSync( 'yarn restartpm2' );
}
module.exports = Tribes;

111
models/Wwws.js Normal file
View File

@@ -0,0 +1,111 @@
const fs = require("fs-extra");
const path = require("path");
const dnsSync = require("dns-sync");
const mustache = require("mustache");
const readlineSync = require("readline-sync");
const conf = require(`../../conf/townconf.json`);
const Wwws = {};
/*Wwws.apxtriinstall = (paramconf) => {
if (fs.existsSync(`${conf.dirtown}/conf.json`)) {
console.log("You already have a conf on this town");
process.exit();
}
//first install
const nginxconf = fs.readFileSync(
`${conf.dirapi}/adminapi/www/adminapx/conf/nginx.conf.mustache`,
"utf8"
);
const proxyparams = fs.readFileSync(
"../nationchains/www/adminapx/static/tpl/nginxproxy_params.mustache",
"utf8"
);
// saved and change nginx conf
if (!fs.existsSync("/etc/nginx/nginxconf.saved")) {
fs.moveSync("/etc/nginx/nginx.conf", "/etc/nginx/nginxconf.saved");
console.log(
"your previous /etc/nginx/nginx.conf was backup in /etc/nginx/nginxconf.saved"
);
}
fs.outputFileSync(
"/etc/nginx/nginx.conf",
mustache.render(nginxconf, paramconf),
"utf8"
);
fs.outputFileSync(
"/etc/nginx/proxy_params",
mustache.render(proxyparams, paramconf),
"utf8"
);
if (!fs.existsSync(paramconf.nginx.logs)) fs.mkdirSync(paramconf.nginx.logs);
paramconf.nginx.firstinstall = true;
fs.outputJsonSync("../nationchains/tribes/conf.json", paramconf, {
space: 2,
});
return Www.create(paramconf.nginx);
};
*/
Wwws.create = (paramnginx) => {
/**
* Create an nginx conf to make available a spaceweb for a tribe /www/appname/
*
*/
const res = {
status: 200,
ref: "Www",
msg: "successfulwww",
data: { website: paramnginx.website },
};
const nginxwebsite = fs.readFileSync(
`${conf.dirapi}/adminapi/www/adminapx/conf/nginxmodelwebsite.conf.mustache`,
"utf8"
);
fs.outputFileSync(
`./${paramnginx.fswww}www/nginx_${paramnginx.website}.conf`,
mustache.render(nginxwebsite, paramnginx),
"utf8"
);
if (!fs.existsSync(`./${paramnginx.fswww}www/${paramnginx.website}`)) {
//See later how to generate specific template of webapp
fs.mkdirSync(`./${paramnginx.fswww}www/${paramnginx.website}`);
}
if (!fs.existsSync(`./${paramnginx.fswww}www/cdn`)) {
//See later how to generate specific template of webapp
fs.mkdirSync(`./${paramnginx.fswww}www/cdn`);
}
//restart nginx
const { exec } = require("child_process");
exec(paramnginx.restart, (error, stdout, stderr) => {
if (error) {
if (paramnginx.firstinstall) {
console.log("\x1b[42m", error, stdout, stderr, "\x1b[0m");
}
//@todo supprimer la derniere config et relancer
res.status = 500;
res.msg = "nginxError";
res.data = { msg: `${error}<br>${stdout}<br>${stderr}` };
} else {
if (paramnginx.firstinstall) {
// the tribes/conf.json is saved in apxtriinstall
console.log(
`\x1b[42m###########################################################################################\x1b[0m\n\x1b[42mWelcome into apxtri, you can now 'yarn dev' for dev or 'yarn startpm2' for prod or \n'yarn unittest' for testing purpose. Access your town here \x1b[0m\x1b[32mhttp://adminapx\x1b[0m \x1b[42m \nto finish your town setup. Don't forget to set your localhost /etc/hosts by adding 127.0.0.1 adminapx or {LAN IP} adminapx . Check the project's README to learn more. \x1b[0m\n\x1b[42m###########################################################################################\x1b[0m`
);
} else {
// add website to tribe conf
}
}
});
return res;
};
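// Usage sketch. The paramnginx shape below is inferred from the template variables used
// above; every field name here is an assumption, not a documented API:
// Wwws.create({
//   website: "myapp",                         // => www/myapp and nginx_myapp.conf
//   fswww: "nationchains/",                   // filesystem prefix holding the www/ folder
//   restart: "sudo systemctl restart nginx",  // command run to reload nginx
//   firstinstall: false,
// });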
Wwws.setssl = () => {
// Run process to change nginx conf to get a ssl
};
Wwws.configlist = (tribeId) => {
//if accessright R return list of conf parameter {webapp:{conf parameter}}
const res = { status: 200, data: {} };
return res;
};
module.exports = Wwws;

View File

@@ -0,0 +1,6 @@
{
"validcheck":"Your data are valid",
"typedoesnnotexistinschema":"This type in your propertie is not manage by Checkjson.js",
"dataerrpropertie":"Check your data that not fit your schema rules propertie",
"dataerrpropertiesrequired":"This propertie is required and not present in your data"
}

View File

@@ -0,0 +1,7 @@
{
"validcheck":"Your data are valid",
"typedoesnnotexistinschema":"This type in your propertie is not manage by Checkjson.js",
"dataerrpropertie":"Check your data that not fit your schema rules propertie",
"dataerrpropertiesrequired":"This propertie is required and not present in your data",
"externalrefnotload":"You have in your schema a relative sub schema, you have to load it before"
}

View File

@@ -0,0 +1,7 @@
{
"nationnamedesc":"Nom unique d'une nation dans un monde social",
"nationnamedesclong":"Nom unique identifiant une nation dans le monde social créer par un maire d'une nouvelle ville avec un contrat sociale specifique",
"nationnameinfo":"<p> Une nation posséde un nom unique, un contrat sociale (contracts/name.js) signé et validé par tous les maires des villes associées à cette nation </p>",
"statusdesc":"Statut de la nation",
"statusactive":"Nation active"
}

View File

@@ -0,0 +1,9 @@
{
"missingconf":"Il manque un smtp/sms valide pour {{tribe}} ou sur le serveur /conf.json",
"missingdata":"Il manque des données obligatoire dans data {{#missingk}} {{.}} {{/missingk}}",
"missingfile":"Le ou les fichiers suivants n'existent pas {{#missingfile}} {{.}} {{/missingfile}}",
"errsendmail":"Une erreur s'est produite lors de l'envoie de l'email",
"successfullsentemail":"Email correctement envoyé",
"errsendsms":"Une erreur s'est produite lors de l'envoie du sms",
"successfullsentsms":"Sms bien envoyé à {{To}}"
}

5
models/lg/Odmdb_en.json Normal file
View File

@@ -0,0 +1,5 @@
{
"schemanotfound":"Schema not found in {{{fullpath}}}",
"pathnamedoesnotexist":"ObjectPath or objectName does not exist {{{indexpath}}}",
"objectfiledoesnotexist":"Requested index does not exist here: {{{objectpath}}}"
}

19
models/lg/Odmdb_fr.json Normal file
View File

@@ -0,0 +1,19 @@
{
"alreadyexist": "Un object {{objectname}} avec la clé {{key}} existe déjà avec {{val}}",
"doesnotexist": "L'object {{objectname}} n'existe pas avec {{key}}:{{val}}",
"getschema": "Schema {{{conf.name}}}",
"schemanotfound": "Schema introuvable dans {{{schemaPath}}}",
"pathnamedoesnotexist": "Le repertoire n'existe pas {{{indexpath}}}",
"objectfiledoesnotexist": "Le fichier n'existe pas {{{objectpath}}}",
"cudsuccessfull": "Mise à jour effectuée avec succés",
"successfulcreatewithoutemail":"Créer sans envoi d'email",
"successfulcreatewitemail":"Créer vous allez recevoir un email",
"missingprimarykey": "Il manque une clé primaire apxid pour stocker et identifier les objects",
"unconsistencyapxidx": "L'index {{name}} doit contenir en objkey au moins {{apxid}} car keyval n'est pas unique",
"profilnotallow": "Vous n'avez pas le profil de {{profils}}, cette action n'est pas authorisée",
"successreindex": "Objet reindexé à partir des items, vos index sont à jour",
"indexexist":"L'indexe existe",
"typenotavailable":"Le type: {{type}} pour la propertie : {{propertie}} de l'object :{{objectPathname}} n'est pas pris en compte pour l'indexation",
"objectslist":"Liste des objects d'apxtri et de {{tribe}}",
"errordelete":"Desolé impossible de supprimer ce compte"
}

8
models/lg/Pagans_en.json Normal file
View File

@@ -0,0 +1,8 @@
{
"aliasexist":"This alias {{alias]} exist",
"aliasdoesnotexist":"This alias {{alias}} does not exist ",
"personexist":"This person {{alias}} exist for {{tribeid}}",
"successfullcreate": "This identity {{alias}} creation was successfull. {{#withemail}} An email was sent to {{email}}, if you do not receive it, please download your keys before living this page.{{/withemail}}",
"successfulluppdate": "Your alias as a Person is now update into {{tribeid}}",
"tribedoesnotexist": "Your tribe {{tribeid}} does not exist in this town"
}

14
models/lg/Pagans_fr.json Normal file
View File

@@ -0,0 +1,14 @@
{
"aliasexist": "Cet alias {{alias}} existe",
"emailerr": "Verifier votre email {{email}}",
"aliasorprivkeytooshort": "Vérifiez votre alias et votre clé privée",
"aliasdoesnotexist": "Cet alias {{alias}} n'existe pas",
"personexist": "Cette personne {{alias}} existe pour {{tribeid}}",
"persondoesnotexist": "Cette personne {{alias}} n'existe pas pour {{tribeid}}",
"successfulcreate":"Votre identité {{alias}} a été créee à partir de vos clés.{{#emailsent}} Un email a été envoyé à {{email}}, si vous ne le recevez pas, veuillez télécharger vos clés avant de quitter cette page.{{/emailsent}} {{#emailerror}}Un problème lors de l'envoi sur {{email}} s'est produit. Veuillez télécharger vos clés avant de quitter cette page.{{/emailerror}}<br>{{#createperson}}Félicitation Votre compte est ouvert chez {{tribe}}. {{/createperson}} {{#errorperson}}Attention votre identité n'a pas été associée à {{tribe}}. Vous pourrez rejoindre {{tribe}} dans l'écran suivant. {{/errorperson}}",
"successfulluppdate": "Votre alias en tant que Personne est maintenant mis à jour dans {{tribeid}}",
"errcreate": "Desolé, un probléme inconnu empeche la creation",
"logout": "Votre token a été supprimé du server",
"emailnotfound":"Auncune identité n'a été enregistrée sur {{tribe}} avec l'email {{email}} ",
"recoveryemailsent":"Votre email: {{email}} dispose de {{nbalias}} identitées sur {{tribe}}, nous avons envoyés {{nbsent}} messages sans problèmes."
}

3
models/lg/Tribes_fr.json Normal file
View File

@@ -0,0 +1,3 @@
{
"actionmissing":"L'action {{data.action}} n'existe pas pour la tribut {{data.tribe}}."
}

View File

@@ -0,0 +1,8 @@
{
"errrequest": "Backend seems not available",
"missingheader": "Some header miss to have a valid request: {{#data}} {{.}} {{/data}}",
"tribeiddoesnotexist": "Header xtribe: {{data.xtribe}} does not exist in this town you cannot access",
"authenticated": "Your alias{{{data.xalias}}} is authenticated",
"notauthenticated": "Your alias: {{data.xalias}} is not authenticated {{^data.aliasexists}} and this alias does not exist !{{/data.aliasexists}}",
"forbiddenAccessright": "Alias {{data.xalias}} has not access right to act {{data.action}} onto object {{data.object}} for tribe {{mor.xworkon}}"
}

View File

@@ -0,0 +1,10 @@
{
"errrequest": "Le serveur ne semble pas répondre",
"unconsistentpgp": "Vos clés ne sont pas conforme {{err}}",
"missingheader": "Certains en-têtes manquent pour avoir une requête valide : {{#data}} {{.}} {{/data}}",
"tribeiddoesnotexist": "L'en-tête xtribe : {{data.xtribe}} n'existe pas dans cette ville, vous ne pouvez pas y accéder",
"authenticated": "Votre alias {{{data.xalias}}} est authentifié",
"notauthenticated": "Votre alias : {{data.xalias}} n'est pas authentifié {{^data.aliasexists}} et cet alias n'existe pas !{{/data.aliasexists}}",
"forbiddenAccessright": "L'alias {{data.xalias}} n'a pas le droit d'agir {{data.action}} sur l'objet {{data.object}} pour la tribu {{mor.xworkon}}",
"signaturefailled": "Desolé votre signature n'est pas valide pour cet alias."
}

View File

@@ -0,0 +1,158 @@
/*
Unit testing
*/
const assert = require("assert");
const Checkjson = require("../Checkjson.js");
const conf = require(`../../conf/townconf.json`);
const ut = { name: "Checkjson" };
const schema = {
$schema: "http://json-schema.org/schema#",
title: "Dummy schema to test Checkjson.js",
description: "Checkjson is used on the server as well as in a browser",
$comment: "We change schema type on the fly to simplify the test",
type: "object",
properties: {
totest: {},
},
};
const testproperties = [
{
name: "test0",
data: { totest: true },
properties: { totest: { type: "boolean" } },
status: 200,
},
{
name: "test1",
data: { totest: "blabla" },
properties: { totest: { type: "string" } },
status: 200,
},
{
name: "test2",
data: { totest: 123 },
properties: { totest: { type: "string" } },
status: 417,
},
{
name: "test3",
data: { totest: 123.13 },
properties: { totest: { type: "integer" } },
status: 417,
},
{
name: "test4",
data: { totest: 123 },
properties: { totest: { type: "number" } },
status: 200,
},
{
name: "test5",
data: { totest: 12312 },
properties: { totest: { type: "number" } },
status: 200,
},
{
name: "test6",
data: { totest: 12.313 },
properties: { totest: { type: "float" } },
status: 200,
},
{
name: "test7",
data: { totest: "blablab sfde" },
properties: { totest: { type: "string", minLength: 1111 } },
status: 417,
},
{
name: "test8",
data: { totest: "blablab sfde" },
properties: { totest: { type: "string", minLength: 4, maxLength: 128 } },
status: 200,
},
{
name: "test9",
data: { totest: 12 },
properties: { totest: { type: "integer", multipleOf: 3 } },
status: 200,
},
{
name: "test10",
data: { totest: 9 },
properties: {
totest: { type: "number", minimum: -10, exclusiveMaximum: 10 },
},
status: 200,
},
{
name: "test11",
data: { totest: 10 },
properties: {
totest: { type: "number", minimum: -10, exclusiveMaximum: 10 },
},
status: 417,
},
{
name: "test12",
data: { totest: "gfhrtabcdgfr" },
properties: { totest: { type: "string", pattern: /.*abc.*/ } },
status: 200,
},
{
name: "test13",
data: { totest: "toto@google.com" },
properties: { totest: { type: "string", format: "email" } },
status: 200,
},
{
name: "test14",
data: { totest: "Aze123@0" },
properties: { totest: { type: "string", format: "password" } },
status: 200,
},
{
name: "test15",
data: { totest: "value1" },
properties: {
totest: { type: "string", enum: ["value1", "value2", "value3"] },
},
status: 200,
},
{
name: "test16",
data: { totest: ["t1", "t2"] },
properties: { totest: { type: ["string", "number"] } },
status: 417,
},
{
name: "test17",
data: { totest: 12 },
properties: { totest: { type: ["string", "number"] } },
status: 200,
},
];
ut.testproperties = (options) => {
let msg = "";
testproperties.forEach((t) => {
schema.properties = t.properties;
const res = Checkjson.schema.data(schema, t.data);
if (res.status != t.status) {
msg = msg == "" ? "Unconsistent testproperties() name list: " : `${msg},`;
if (options.verbose) {
console.log(t);
console.log(res);
}
msg += res.err.map((e) => ` ${t.name} ${e.info}`);
}
});
return assert.deepEqual(msg, "", msg);
};
ut.run = (options) => {
console.log("Test Checkjson properties");
ut.testproperties(options);
};
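// Usage sketch: this suite is meant to be driven by an apxtri unittest runner, or directly:
// require("./Checkjson.js").run({ verbose: true }); // path relative to the caller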
module.exports = ut;

43
models/unittest/Odmdb.js Normal file
View File

@@ -0,0 +1,43 @@
/*
Unit testing
*/
const assert = require("assert");
const fs = require("fs-extra");
const path = require("path");
const Odmdb = require("../Odmdb.js");
const { generemdp } = require("../Toolsbox.js");
const conf = require(`../../conf/townconf.json`);
const ut = { name: "Odmdb" };
/*
Test crud process for any object
*/
ut.crud = (objectPathname, itm, profils) => {
//
// test if exist
// if not test create
// test to read
// test update
// test delete
const res = { status: 200, err: [] };
return res;
};
const testvar = { alias: "tutu", passphrase: "", privatekey: "", publickey: "" };
const testitms = [
{
objectPathname: `../nationchains/pagans`,
// publickey is left empty here; a real run would fill testvar with a generated key pair first
itm: { alias: "toutou", publickey: testvar.publickey },
},
];
ut.run = (options) => {
let msg = "";
testitms.forEach((i) => {
ut.crud(i.objectPathname, i.itm);
// on error, append to msg
});
assert.deepEqual(msg, "", msg);
};
module.exports = ut;